From b54a9294c7b833341a3a548702ef8d5f605a613f Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:43:06 -0700 Subject: [PATCH 01/11] deps: @npmcli/run-script@6.0.2 --- .../@npmcli/run-script/lib/run-script-pkg.js | 4 ++++ .../@npmcli/run-script/lib/signal-manager.js | 14 ++++++++------ node_modules/@npmcli/run-script/package.json | 6 +++--- package-lock.json | 8 ++++---- package.json | 2 +- 5 files changed, 20 insertions(+), 14 deletions(-) diff --git a/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/@npmcli/run-script/lib/run-script-pkg.js index cbb0a0b3a5e73..a5518285d1af1 100644 --- a/node_modules/@npmcli/run-script/lib/run-script-pkg.js +++ b/node_modules/@npmcli/run-script/lib/run-script-pkg.js @@ -94,7 +94,11 @@ const runScriptPkg = async options => { return p.catch(er => { const { signal } = er if (stdio === 'inherit' && signal) { + // by the time we reach here, the child has already exited. we send the + // signal back to ourselves again so that npm will exit with the same + // status as the child process.kill(process.pid, signal) + // just in case we don't die, reject after 500ms // this also keeps the node process open long enough to actually // get the signal, rather than terminating gracefully. diff --git a/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/@npmcli/run-script/lib/signal-manager.js index 7e10f859e0a68..efc00b488063f 100644 --- a/node_modules/@npmcli/run-script/lib/signal-manager.js +++ b/node_modules/@npmcli/run-script/lib/signal-manager.js @@ -1,17 +1,19 @@ const runningProcs = new Set() let handlersInstalled = false +// NOTE: these signals aren't actually forwarded anywhere. they're trapped and +// ignored until all child processes have exited. in our next breaking change +// we should rename this const forwardedSignals = [ 'SIGINT', 'SIGTERM', ] -const handleSignal = signal => { - for (const proc of runningProcs) { - proc.kill(signal) - } -} - +// no-op, this is so receiving the signal doesn't cause us to exit immediately +// instead, we exit after all children have exited when we re-send the signal +// to ourselves. see the catch handler at the bottom of run-script-pkg.js +// istanbul ignore next - this function does nothing +const handleSignal = () => {} const setupListeners = () => { for (const signal of forwardedSignals) { process.on(signal, handleSignal) diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json index cdcf6fb0fcf82..38f6f72fa6ad9 100644 --- a/node_modules/@npmcli/run-script/package.json +++ b/node_modules/@npmcli/run-script/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/run-script", - "version": "6.0.1", + "version": "6.0.2", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", @@ -16,7 +16,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -41,7 +41,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.15.1", "publish": "true" }, "tap": { diff --git a/package-lock.json b/package-lock.json index e73fd43244d71..ee08c1cbe97e3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -88,7 +88,7 @@ "@npmcli/config": "^6.1.6", "@npmcli/map-workspaces": "^3.0.4", "@npmcli/package-json": "^3.0.0", - "@npmcli/run-script": "^6.0.1", + "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", "cacache": "^17.1.0", @@ -2346,9 +2346,9 @@ } }, "node_modules/@npmcli/run-script": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.1.tgz", - "integrity": "sha512-Yi04ZSold8jcbBJD/ahKMJSQCQifH8DAbMwkBvoLaTpGFxzHC3B/5ZyoVR69q/4xedz84tvi9DJOJjNe17h+LA==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz", + "integrity": "sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==", "inBundle": true, "dependencies": { "@npmcli/node-gyp": "^3.0.0", diff --git a/package.json b/package.json index 9486e8c6ef309..080de64377772 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "@npmcli/config": "^6.1.6", "@npmcli/map-workspaces": "^3.0.4", "@npmcli/package-json": "^3.0.0", - "@npmcli/run-script": "^6.0.1", + "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", "cacache": "^17.1.0", From 4e7f30a43576e1cb0fb090d8c5f2f9362a9c4294 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:44:00 -0700 Subject: [PATCH 02/11] deps: semver@7.5.1 --- node_modules/semver/classes/semver.js | 2 +- node_modules/semver/package.json | 6 +++--- package-lock.json | 8 ++++---- package.json | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js index 25ee889d1492a..99dbe82db4dc5 100644 --- a/node_modules/semver/classes/semver.js +++ b/node_modules/semver/classes/semver.js @@ -16,7 +16,7 @@ class SemVer { version = version.version } } else if (typeof version !== 'string') { - throw new TypeError(`Invalid Version: ${require('util').inspect(version)}`) + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) } if (version.length > MAX_LENGTH) { diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json index 0a6095b8900a6..592404a3c9d1c 100644 --- a/node_modules/semver/package.json +++ b/node_modules/semver/package.json @@ -1,6 +1,6 @@ { "name": "semver", - "version": "7.5.0", + "version": "7.5.1", "description": "The semantic version parser used by npm.", "main": "index.js", "scripts": { @@ -14,7 +14,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.14.1", "tap": "^16.0.0" }, "license": "ISC", @@ -53,7 +53,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.13.0", + "version": "4.14.1", "engines": ">=10", "ciVersions": [ "10.0.0", diff --git a/package-lock.json b/package-lock.json index ee08c1cbe97e3..f05d0e278cdca 100644 --- a/package-lock.json +++ b/package-lock.json @@ -140,7 +140,7 @@ "read": "^2.1.0", "read-package-json": "^6.0.3", "read-package-json-fast": "^3.0.2", - "semver": "^7.5.0", + "semver": "^7.5.1", "ssri": "^10.0.4", "tar": "^6.1.14", "text-table": "~0.2.0", @@ -11355,9 +11355,9 @@ } }, "node_modules/semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz", - "integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==", + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", + "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", "inBundle": true, "dependencies": { "lru-cache": "^6.0.0" diff --git a/package.json b/package.json index 080de64377772..be496671b5895 100644 --- a/package.json +++ b/package.json @@ -109,7 +109,7 @@ "read": "^2.1.0", "read-package-json": "^6.0.3", "read-package-json-fast": "^3.0.2", - "semver": "^7.5.0", + "semver": "^7.5.1", "ssri": "^10.0.4", "tar": "^6.1.14", "text-table": "~0.2.0", From c1bdef569d550f943a0532216410da7180e1e807 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:46:30 -0700 Subject: [PATCH 03/11] deps: glob@10.2.4 --- node_modules/glob/dist/cjs/package.json | 4 ++-- node_modules/glob/dist/cjs/src/glob.js | 7 ++++++- node_modules/glob/dist/mjs/glob.js | 7 ++++++- node_modules/glob/dist/mjs/package.json | 2 +- node_modules/glob/package.json | 4 ++-- package-lock.json | 10 +++++----- package.json | 2 +- 7 files changed, 23 insertions(+), 13 deletions(-) diff --git a/node_modules/glob/dist/cjs/package.json b/node_modules/glob/dist/cjs/package.json index e225638de741d..8762de67dc4d5 100644 --- a/node_modules/glob/dist/cjs/package.json +++ b/node_modules/glob/dist/cjs/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter (http://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.2", + "version": "10.2.4", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -63,7 +63,7 @@ "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.0", - "minipass": "^5.0.0", + "minipass": "^5.0.0 || ^6.0.0", "path-scurry": "^1.7.0" }, "devDependencies": { diff --git a/node_modules/glob/dist/cjs/src/glob.js b/node_modules/glob/dist/cjs/src/glob.js index a05d9f0eb3963..e7ad4deb980d3 100644 --- a/node_modules/glob/dist/cjs/src/glob.js +++ b/node_modules/glob/dist/cjs/src/glob.js @@ -130,6 +130,11 @@ class Glob { }); } this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; const mmo = { // default nocase based on platform ...opts, @@ -137,7 +142,7 @@ class Glob { matchBase: this.matchBase, nobrace: this.nobrace, nocase: this.nocase, - nocaseMagicOnly: true, + nocaseMagicOnly, nocomment: true, noext: this.noext, nonegate: true, diff --git a/node_modules/glob/dist/mjs/glob.js b/node_modules/glob/dist/mjs/glob.js index a246019cd35f9..f158065746e58 100644 --- a/node_modules/glob/dist/mjs/glob.js +++ b/node_modules/glob/dist/mjs/glob.js @@ -127,6 +127,11 @@ export class Glob { }); } this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; const mmo = { // default nocase based on platform ...opts, @@ -134,7 +139,7 @@ export class Glob { matchBase: this.matchBase, nobrace: this.nobrace, nocase: this.nocase, - nocaseMagicOnly: true, + nocaseMagicOnly, nocomment: true, noext: this.noext, nonegate: true, diff --git a/node_modules/glob/dist/mjs/package.json b/node_modules/glob/dist/mjs/package.json index ff3441b45957b..e066bfabfb543 100644 --- a/node_modules/glob/dist/mjs/package.json +++ b/node_modules/glob/dist/mjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.1", + "version": "10.2.3", "type": "module" } diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json index b04d087e28d89..e11e8e3302579 100644 --- a/node_modules/glob/package.json +++ b/node_modules/glob/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter (http://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.2", + "version": "10.2.4", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -63,7 +63,7 @@ "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.0", - "minipass": "^5.0.0", + "minipass": "^5.0.0 || ^6.0.0", "path-scurry": "^1.7.0" }, "devDependencies": { diff --git a/package-lock.json b/package-lock.json index f05d0e278cdca..2fde2175fb1ef 100644 --- a/package-lock.json +++ b/package-lock.json @@ -99,7 +99,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.2", - "glob": "^10.2.2", + "glob": "^10.2.4", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": "^4.1.0", @@ -5730,15 +5730,15 @@ "dev": true }, "node_modules/glob": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.2.2.tgz", - "integrity": "sha512-Xsa0BcxIC6th9UwNjZkhrMtNo/MnyRL8jGCP+uEwhA5oFOCY1f2s1/oNKY47xQ0Bg5nkjsfAEIej1VeH62bDDQ==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.2.4.tgz", + "integrity": "sha512-fDboBse/sl1oXSLhIp0FcCJgzW9KmhC/q8ULTKC82zc+DL3TL7FNb8qlt5qqXN53MsKEUSIcb+7DLmEygOE5Yw==", "inBundle": true, "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.0", - "minipass": "^5.0.0", + "minipass": "^5.0.0 || ^6.0.0", "path-scurry": "^1.7.0" }, "bin": { diff --git a/package.json b/package.json index be496671b5895..452184aba643a 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.2", - "glob": "^10.2.2", + "glob": "^10.2.4", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": 
"^4.1.0", From fcecb44209f75f84dd7efbf7d4e1ede8b34690a5 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:50:04 -0700 Subject: [PATCH 04/11] deps: sigstore@1.5.2 --- node_modules/sigstore/dist/ca/index.js | 19 ++- node_modules/sigstore/dist/ca/verify/chain.js | 11 +- node_modules/sigstore/dist/ca/verify/index.js | 5 +- node_modules/sigstore/dist/config.js | 19 ++- node_modules/sigstore/dist/external/fulcio.js | 4 +- node_modules/sigstore/dist/external/index.js | 4 +- node_modules/sigstore/dist/external/rekor.js | 4 +- node_modules/sigstore/dist/external/tsa.js | 47 ++++++ node_modules/sigstore/dist/sign.js | 53 +++++- node_modules/sigstore/dist/sigstore-utils.js | 7 +- node_modules/sigstore/dist/sigstore.js | 19 ++- node_modules/sigstore/dist/tlog/index.js | 13 +- node_modules/sigstore/dist/tsa/index.js | 47 ++++++ node_modules/sigstore/dist/tuf/index.js | 49 ++++-- node_modules/sigstore/dist/tuf/target.js | 2 +- node_modules/sigstore/dist/types/fetch.js | 2 + .../sigstore/dist/types/sigstore/index.js | 48 ++++-- node_modules/sigstore/dist/verify.js | 4 +- node_modules/sigstore/dist/x509/verify.js | 36 +++- node_modules/sigstore/package.json | 44 ++--- .../store/public-good-instance-root.json | 157 +----------------- package-lock.json | 6 +- 22 files changed, 342 insertions(+), 258 deletions(-) create mode 100644 node_modules/sigstore/dist/external/tsa.js create mode 100644 node_modules/sigstore/dist/tsa/index.js create mode 100644 node_modules/sigstore/dist/types/fetch.js diff --git a/node_modules/sigstore/dist/ca/index.js b/node_modules/sigstore/dist/ca/index.js index 7e0f9e0c5c4c0..340dd46609aad 100644 --- a/node_modules/sigstore/dist/ca/index.js +++ b/node_modules/sigstore/dist/ca/index.js @@ -6,13 +6,26 @@ const external_1 = require("../external"); const format_1 = require("./format"); class CAClient { constructor(options) { - this.fulcio = new external_1.Fulcio({ baseURL: options.fulcioBaseURL }); + this.fulcio = new external_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); } async createSigningCertificate(identityToken, publicKey, challenge) { const request = (0, format_1.toCertificateRequest)(identityToken, publicKey, challenge); try { - const certificate = await this.fulcio.createSigningCertificate(request); - return certificate.signedCertificateEmbeddedSct.chain.certificates; + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + // Return the first certificate in the chain, which is the signing + // certificate. Specifically not returning the rest of the chain to + // mitigate the risk of errors when verifying the certificate chain. + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return cert.chain.certificates.slice(0, 1); } catch (err) { throw new error_1.InternalError({ diff --git a/node_modules/sigstore/dist/ca/verify/chain.js b/node_modules/sigstore/dist/ca/verify/chain.js index 0f6f714695728..3246c7a154e2d 100644 --- a/node_modules/sigstore/dist/ca/verify/chain.js +++ b/node_modules/sigstore/dist/ca/verify/chain.js @@ -19,12 +19,11 @@ limitations under the License. 
const error_1 = require("../../error"); const cert_1 = require("../../x509/cert"); const verify_1 = require("../../x509/verify"); -function verifyChain(bundleCerts, certificateAuthorities) { - const certs = parseCerts(bundleCerts); - const signingCert = certs[0]; +function verifyChain(certificate, certificateAuthorities) { + const untrustedCert = cert_1.x509Certificate.parse(certificate.rawBytes); // Filter the list of certificate authorities to those which are valid for the // signing certificate's notBefore date. - const validCAs = filterCertificateAuthorities(certificateAuthorities, signingCert.notBefore); + const validCAs = filterCertificateAuthorities(certificateAuthorities, untrustedCert.notBefore); if (validCAs.length === 0) { throw new error_1.VerificationError('No valid certificate authorities'); } @@ -34,9 +33,9 @@ function verifyChain(bundleCerts, certificateAuthorities) { const trustedCerts = parseCerts(ca.certChain?.certificates || []); try { trustedChain = (0, verify_1.verifyCertificateChain)({ + untrustedCert, trustedCerts, - certs, - validAt: signingCert.notBefore, + validAt: untrustedCert.notBefore, }); return true; } diff --git a/node_modules/sigstore/dist/ca/verify/index.js b/node_modules/sigstore/dist/ca/verify/index.js index 9c42f3094338f..32f85c828fe5a 100644 --- a/node_modules/sigstore/dist/ca/verify/index.js +++ b/node_modules/sigstore/dist/ca/verify/index.js @@ -6,8 +6,9 @@ const sct_1 = require("./sct"); const signer_1 = require("./signer"); function verifySigningCertificate(bundle, trustedRoot, options) { // Check that a trusted certificate chain can be found for the signing - // certificate in the bundle - const trustedChain = (0, chain_1.verifyChain)(bundle.verificationMaterial.content.x509CertificateChain.certificates, trustedRoot.certificateAuthorities); + // certificate in the bundle. Only the first certificate in the bundle's + // chain is used -- everything else must come from the trusted root. + const trustedChain = (0, chain_1.verifyChain)(bundle.verificationMaterial.content.x509CertificateChain.certificates[0], trustedRoot.certificateAuthorities); // Unless disabled, verify the SCTs in the signing certificate if (options.ctlogOptions.disable === false) { (0, sct_1.verifySCTs)(trustedChain, trustedRoot.ctlogs, options.ctlogOptions); diff --git a/node_modules/sigstore/dist/config.js b/node_modules/sigstore/dist/config.js index 7e6e42d9bf369..1a22c5fef313b 100644 --- a/node_modules/sigstore/dist/config.js +++ b/node_modules/sigstore/dist/config.js @@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.identityProviders = exports.artifactVerificationOptions = exports.createTLogClient = exports.createCAClient = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0; +exports.identityProviders = exports.artifactVerificationOptions = exports.createTSAClient = exports.createTLogClient = exports.createCAClient = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0; /* Copyright 2023 The Sigstore Authors. @@ -45,21 +45,38 @@ limitations under the License. 
const ca_1 = require("./ca"); const identity_1 = __importDefault(require("./identity")); const tlog_1 = require("./tlog"); +const tsa_1 = require("./tsa"); const sigstore = __importStar(require("./types/sigstore")); exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; +exports.DEFAULT_RETRY = { retries: 2 }; +exports.DEFAULT_TIMEOUT = 5000; function createCAClient(options) { return new ca_1.CAClient({ fulcioBaseURL: options.fulcioURL || exports.DEFAULT_FULCIO_URL, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, }); } exports.createCAClient = createCAClient; function createTLogClient(options) { return new tlog_1.TLogClient({ rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, }); } exports.createTLogClient = createTLogClient; +function createTSAClient(options) { + return options.tsaServerURL + ? new tsa_1.TSAClient({ + tsaBaseURL: options.tsaServerURL, + retry: options.retry ?? exports.DEFAULT_RETRY, + timeout: options.timeout ?? exports.DEFAULT_TIMEOUT, + }) + : undefined; +} +exports.createTSAClient = createTSAClient; // Assembles the AtifactVerificationOptions from the supplied VerifyOptions. function artifactVerificationOptions(options) { // The trusted signers are only used if the options contain a certificate diff --git a/node_modules/sigstore/dist/external/fulcio.js b/node_modules/sigstore/dist/external/fulcio.js index 288ca32caaea7..aeb48d58d8d83 100644 --- a/node_modules/sigstore/dist/external/fulcio.js +++ b/node_modules/sigstore/dist/external/fulcio.js @@ -28,8 +28,8 @@ const error_1 = require("./error"); class Fulcio { constructor(options) { this.fetch = make_fetch_happen_1.default.defaults({ - retry: { retries: 2 }, - timeout: 5000, + retry: options.retry, + timeout: options.timeout, headers: { 'Content-Type': 'application/json', 'User-Agent': util_1.ua.getUserAgent(), diff --git a/node_modules/sigstore/dist/external/index.js b/node_modules/sigstore/dist/external/index.js index da5f084001279..f40816e9b7ca4 100644 --- a/node_modules/sigstore/dist/external/index.js +++ b/node_modules/sigstore/dist/external/index.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Rekor = exports.Fulcio = exports.HTTPError = void 0; +exports.TimestampAuthority = exports.Rekor = exports.Fulcio = exports.HTTPError = void 0; /* Copyright 2022 The Sigstore Authors. 
@@ -22,3 +22,5 @@ var fulcio_1 = require("./fulcio"); Object.defineProperty(exports, "Fulcio", { enumerable: true, get: function () { return fulcio_1.Fulcio; } }); var rekor_1 = require("./rekor"); Object.defineProperty(exports, "Rekor", { enumerable: true, get: function () { return rekor_1.Rekor; } }); +var tsa_1 = require("./tsa"); +Object.defineProperty(exports, "TimestampAuthority", { enumerable: true, get: function () { return tsa_1.TimestampAuthority; } }); diff --git a/node_modules/sigstore/dist/external/rekor.js b/node_modules/sigstore/dist/external/rekor.js index 6bb085c44cecd..80650ce02ff9b 100644 --- a/node_modules/sigstore/dist/external/rekor.js +++ b/node_modules/sigstore/dist/external/rekor.js @@ -28,8 +28,8 @@ const error_1 = require("./error"); class Rekor { constructor(options) { this.fetch = make_fetch_happen_1.default.defaults({ - retry: { retries: 2 }, - timeout: 5000, + retry: options.retry, + timeout: options.timeout, headers: { Accept: 'application/json', 'User-Agent': util_1.ua.getUserAgent(), diff --git a/node_modules/sigstore/dist/external/tsa.js b/node_modules/sigstore/dist/external/tsa.js new file mode 100644 index 0000000000000..5277d7d3f9707 --- /dev/null +++ b/node_modules/sigstore/dist/external/tsa.js @@ -0,0 +1,47 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TimestampAuthority = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const util_1 = require("../util"); +const error_1 = require("./error"); +class TimestampAuthority { + constructor(options) { + this.fetch = make_fetch_happen_1.default.defaults({ + retry: options.retry, + timeout: options.timeout, + headers: { + 'Content-Type': 'application/json', + 'User-Agent': util_1.ua.getUserAgent(), + }, + }); + this.baseUrl = options.baseURL; + } + async createTimestamp(request) { + const url = `${this.baseUrl}/api/v1/timestamp`; + const response = await this.fetch(url, { + method: 'POST', + body: JSON.stringify(request), + }); + (0, error_1.checkStatus)(response); + return response.buffer(); + } +} +exports.TimestampAuthority = TimestampAuthority; diff --git a/node_modules/sigstore/dist/sign.js b/node_modules/sigstore/dist/sign.js index 97c3da04b065b..96e6272750b49 100644 --- a/node_modules/sigstore/dist/sign.js +++ b/node_modules/sigstore/dist/sign.js @@ -1,13 +1,39 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.Signer = void 0; +const sigstore = __importStar(require("./types/sigstore")); const util_1 = require("./util"); class Signer { constructor(options) { this.identityProviders = []; this.ca = options.ca; this.tlog = options.tlog; + this.tsa = options.tsa; this.identityProviders = options.identityProviders; + this.tlogUpload = options.tlogUpload ?? true; this.signer = options.signer || this.signWithEphemeralKey.bind(this); } async signBlob(payload) { @@ -15,8 +41,18 @@ class Signer { const sigMaterial = await this.signer(payload); // Calculate artifact digest const digest = util_1.crypto.hash(payload); - // Create Rekor entry - return this.tlog.createMessageSignatureEntry(digest, sigMaterial); + // Create a Rekor entry (if tlogUpload is enabled) + const entry = this.tlogUpload + ? await this.tlog.createMessageSignatureEntry(digest, sigMaterial) + : undefined; + return sigstore.toMessageSignatureBundle({ + digest, + signature: sigMaterial, + tlogEntry: entry, + timestamp: this.tsa + ? await this.tsa.createTimestamp(sigMaterial.signature) + : undefined, + }); } async signAttestation(payload, payloadType) { // Pre-authentication encoding to be signed @@ -33,7 +69,18 @@ class Signer { }, ], }; - return this.tlog.createDSSEEntry(envelope, sigMaterial); + // Create a Rekor entry (if tlogUpload is enabled) + const entry = this.tlogUpload + ? await this.tlog.createDSSEEntry(envelope, sigMaterial) + : undefined; + return sigstore.toDSSEBundle({ + envelope, + signature: sigMaterial, + tlogEntry: entry, + timestamp: this.tsa + ? 
await this.tsa.createTimestamp(sigMaterial.signature) + : undefined, + }); } async signWithEphemeralKey(payload) { // Create emphemeral key pair diff --git a/node_modules/sigstore/dist/sigstore-utils.js b/node_modules/sigstore/dist/sigstore-utils.js index 79918a806b17d..1341052047229 100644 --- a/node_modules/sigstore/dist/sigstore-utils.js +++ b/node_modules/sigstore/dist/sigstore-utils.js @@ -67,9 +67,14 @@ async function createRekorEntry(dsseEnvelope, publicKey, options = {}) { const envelope = sigstore.Envelope.fromJSON(dsseEnvelope); const tlog = (0, config_1.createTLogClient)(options); const sigMaterial = (0, signature_1.extractSignatureMaterial)(envelope, publicKey); - const bundle = await tlog.createDSSEEntry(envelope, sigMaterial, { + const entry = await tlog.createDSSEEntry(envelope, sigMaterial, { fetchOnConflict: true, }); + const bundle = sigstore.toDSSEBundle({ + envelope, + signature: sigMaterial, + tlogEntry: entry, + }); return sigstore.Bundle.toJSON(bundle); } exports.createRekorEntry = createRekorEntry; diff --git a/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js index f45270217b017..8d245e17b2a0c 100644 --- a/node_modules/sigstore/dist/sigstore.js +++ b/node_modules/sigstore/dist/sigstore.js @@ -52,6 +52,7 @@ async function sign(payload, options = {}) { ca, tlog, identityProviders: idps, + tlogUpload: options.tlogUpload, }); const bundle = await signer.signBlob(payload); return sigstore.Bundle.toJSON(bundle); @@ -60,11 +61,14 @@ exports.sign = sign; async function attest(payload, payloadType, options = {}) { const ca = config.createCAClient(options); const tlog = config.createTLogClient(options); + const tsa = config.createTSAClient(options); const idps = config.identityProviders(options); const signer = new sign_1.Signer({ ca, tlog, + tsa, identityProviders: idps, + tlogUpload: options.tlogUpload, }); const bundle = await signer.signAttestation(payload, payloadType); return sigstore.Bundle.toJSON(bundle); @@ -75,6 +79,8 @@ async function verify(bundle, payload, options = {}) { mirrorURL: options.tufMirrorURL, rootPath: options.tufRootPath, cachePath: options.tufCachePath, + retry: options.retry ?? config.DEFAULT_RETRY, + timeout: options.timeout ?? config.DEFAULT_TIMEOUT, }); const verifier = new verify_1.Verifier(trustedRoot, options.keySelector); const deserializedBundle = sigstore.bundleFromJSON(bundle); @@ -83,12 +89,21 @@ async function verify(bundle, payload, options = {}) { } exports.verify = verify; const tufUtils = { - getTarget: (path, options = {}) => { - return tuf.getTarget(path, { + client: (options = {}) => { + const t = new tuf.TUFClient({ mirrorURL: options.tufMirrorURL, rootPath: options.tufRootPath, cachePath: options.tufCachePath, + retry: options.retry ?? config.DEFAULT_RETRY, + timeout: options.timeout ?? config.DEFAULT_TIMEOUT, }); + return t.refresh().then(() => t); + }, + /* + * @deprecated Use tufUtils.client instead. + */ + getTarget: (path, options = {}) => { + return tufUtils.client(options).then((t) => t.getTarget(path)); }, }; exports.tuf = tufUtils; diff --git a/node_modules/sigstore/dist/tlog/index.js b/node_modules/sigstore/dist/tlog/index.js index 4193e55752ff0..7f5f531983b37 100644 --- a/node_modules/sigstore/dist/tlog/index.js +++ b/node_modules/sigstore/dist/tlog/index.js @@ -18,21 +18,22 @@ limitations under the License. 
*/ const error_1 = require("../error"); const external_1 = require("../external"); -const sigstore_1 = require("../types/sigstore"); const format_1 = require("./format"); class TLogClient { constructor(options) { - this.rekor = new external_1.Rekor({ baseURL: options.rekorBaseURL }); + this.rekor = new external_1.Rekor({ + baseURL: options.rekorBaseURL, + retry: options.retry, + timeout: options.timeout, + }); } async createMessageSignatureEntry(digest, sigMaterial, options = {}) { const proposedEntry = (0, format_1.toProposedHashedRekordEntry)(digest, sigMaterial); - const entry = await this.createEntry(proposedEntry, options.fetchOnConflict); - return sigstore_1.bundle.toMessageSignatureBundle(digest, sigMaterial, entry); + return this.createEntry(proposedEntry, options.fetchOnConflict); } async createDSSEEntry(envelope, sigMaterial, options = {}) { const proposedEntry = (0, format_1.toProposedIntotoEntry)(envelope, sigMaterial); - const entry = await this.createEntry(proposedEntry, options.fetchOnConflict); - return sigstore_1.bundle.toDSSEBundle(envelope, sigMaterial, entry); + return this.createEntry(proposedEntry, options.fetchOnConflict); } async createEntry(proposedEntry, fetchOnConflict = false) { let entry; diff --git a/node_modules/sigstore/dist/tsa/index.js b/node_modules/sigstore/dist/tsa/index.js new file mode 100644 index 0000000000000..4951b24a93f4f --- /dev/null +++ b/node_modules/sigstore/dist/tsa/index.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAClient = void 0; +/* +Copyright 2022 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +const external_1 = require("../external"); +const util_1 = require("../util"); +class TSAClient { + constructor(options) { + this.tsa = new external_1.TimestampAuthority({ + baseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createTimestamp(signature) { + const request = { + artifactHash: util_1.crypto.hash(signature).toString('base64'), + hashAlgorithm: 'sha256', + }; + try { + return await this.tsa.createTimestamp(request); + } + catch (err) { + throw new error_1.InternalError({ + code: 'TSA_CREATE_TIMESTAMP_ERROR', + message: 'error creating timestamp', + cause: err, + }); + } + } +} +exports.TSAClient = TSAClient; diff --git a/node_modules/sigstore/dist/tuf/index.js b/node_modules/sigstore/dist/tuf/index.js index 89923d63fa657..86a081de9f3af 100644 --- a/node_modules/sigstore/dist/tuf/index.js +++ b/node_modules/sigstore/dist/tuf/index.js @@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getTarget = exports.getTrustedRoot = void 0; +exports.TUFClient = exports.getTrustedRoot = void 0; /* Copyright 2023 The Sigstore Authors. 
@@ -53,20 +53,28 @@ const DEFAULT_CACHE_DIR = util_1.appdata.appDataPath('sigstore-js'); const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; const DEFAULT_TUF_ROOT_PATH = '../../store/public-good-instance-root.json'; async function getTrustedRoot(options = {}) { - const trustedRoot = await getTarget(TRUSTED_ROOT_TARGET, options); + const client = new TUFClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); return sigstore.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); } exports.getTrustedRoot = getTrustedRoot; -async function getTarget(targetName, options = {}) { - const cachePath = options.cachePath || DEFAULT_CACHE_DIR; - const tufRootPath = options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH); - const mirrorURL = options.mirrorURL || DEFAULT_MIRROR_URL; - initTufCache(cachePath, tufRootPath); - const remote = initRemoteConfig(cachePath, mirrorURL); - const repoClient = initClient(cachePath, remote); - return (0, target_1.readTarget)(repoClient, targetName); +class TUFClient { + constructor(options) { + const cachePath = options.cachePath || DEFAULT_CACHE_DIR; + const tufRootPath = options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH); + const mirrorURL = options.mirrorURL || DEFAULT_MIRROR_URL; + initTufCache(cachePath, tufRootPath); + const remote = initRemoteConfig(cachePath, mirrorURL); + this.updater = initClient(cachePath, remote, options); + } + async refresh() { + return this.updater.refresh(); + } + getTarget(targetName) { + return (0, target_1.readTarget)(this.updater, targetName); + } } -exports.getTarget = getTarget; +exports.TUFClient = TUFClient; // Initializes the TUF cache directory structure including the initial // root.json file. If the cache directory does not exist, it will be // created. If the targets directory does not exist, it will be created. @@ -102,12 +110,29 @@ function initRemoteConfig(rootDir, mirrorURL) { } return remoteConfig; } -function initClient(cachePath, remote) { +function initClient(cachePath, remote, options) { const baseURL = remote.mirror; + const config = { + fetchTimeout: options.timeout, + }; + // tuf-js only supports a number for fetchRetries so we have to + // convert the boolean and object options to a number. 
+ if (typeof options.retry !== 'undefined') { + if (typeof options.retry === 'number') { + config.fetchRetries = options.retry; + } + else if (typeof options.retry === 'object') { + config.fetchRetries = options.retry.retries; + } + else if (options.retry === true) { + config.fetchRetries = 1; + } + } return new tuf_js_1.Updater({ metadataBaseUrl: baseURL, targetBaseUrl: `${baseURL}/targets`, metadataDir: cachePath, targetDir: path_1.default.join(cachePath, 'targets'), + config, }); } diff --git a/node_modules/sigstore/dist/tuf/target.js b/node_modules/sigstore/dist/tuf/target.js index b79411c3dd0a4..d7df61e5a4076 100644 --- a/node_modules/sigstore/dist/tuf/target.js +++ b/node_modules/sigstore/dist/tuf/target.js @@ -46,7 +46,7 @@ exports.readTarget = readTarget; async function getTargetPath(tuf, target) { let targetInfo; try { - targetInfo = await tuf.refresh().then(() => tuf.getTargetInfo(target)); + targetInfo = await tuf.getTargetInfo(target); } catch (err) { throw new error_1.InternalError({ diff --git a/node_modules/sigstore/dist/types/fetch.js b/node_modules/sigstore/dist/types/fetch.js new file mode 100644 index 0000000000000..c8ad2e549bdc6 --- /dev/null +++ b/node_modules/sigstore/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/sigstore/dist/types/sigstore/index.js b/node_modules/sigstore/dist/types/sigstore/index.js index 9fcdb42bdcf34..544db63b002bf 100644 --- a/node_modules/sigstore/dist/types/sigstore/index.js +++ b/node_modules/sigstore/dist/types/sigstore/index.js @@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.signingCertificate = exports.bundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.isBundleWithVerificationMaterial = exports.bundleFromJSON = void 0; +exports.signingCertificate = exports.toMessageSignatureBundle = exports.toDSSEBundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.isBundleWithVerificationMaterial = exports.bundleFromJSON = void 0; /* Copyright 2023 The Sigstore Authors. 
@@ -69,16 +69,20 @@ function isVerifiableTransparencyLogEntry(entry) { entry.kindVersion !== undefined); } exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry; -exports.bundle = { - toDSSEBundle: (envelope, signature, rekorEntry) => ({ +function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }) { + return { mediaType: BUNDLE_MEDIA_TYPE, - content: { - $case: 'dsseEnvelope', - dsseEnvelope: envelope, - }, - verificationMaterial: toVerificationMaterial(signature, rekorEntry), - }), - toMessageSignatureBundle: (digest, signature, rekorEntry) => ({ + content: { $case: 'dsseEnvelope', dsseEnvelope: envelope }, + verificationMaterial: toVerificationMaterial({ + signature, + tlogEntry, + timestamp, + }), + }; +} +exports.toDSSEBundle = toDSSEBundle; +function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) { + return { mediaType: BUNDLE_MEDIA_TYPE, content: { $case: 'messageSignature', @@ -90,9 +94,14 @@ exports.bundle = { signature: signature.signature, }, }, - verificationMaterial: toVerificationMaterial(signature, rekorEntry), - }), -}; + verificationMaterial: toVerificationMaterial({ + signature, + tlogEntry, + timestamp, + }), + }; +} +exports.toMessageSignatureBundle = toMessageSignatureBundle; function toTransparencyLogEntry(entry) { const set = Buffer.from(entry.verification.signedEntryTimestamp, 'base64'); const logID = Buffer.from(entry.logID, 'hex'); @@ -116,13 +125,15 @@ function toTransparencyLogEntry(entry) { canonicalizedBody: Buffer.from(entry.body, 'base64'), }; } -function toVerificationMaterial(signature, entry) { +function toVerificationMaterial({ signature, tlogEntry, timestamp, }) { return { content: signature.certificates ? toVerificationMaterialx509CertificateChain(signature.certificates) : toVerificationMaterialPublicKey(signature.key.id || ''), - tlogEntries: [toTransparencyLogEntry(entry)], - timestampVerificationData: undefined, + tlogEntries: tlogEntry ? [toTransparencyLogEntry(tlogEntry)] : [], + timestampVerificationData: timestamp + ? toTimestampVerificationData(timestamp) + : undefined, }; } function toVerificationMaterialx509CertificateChain(certificates) { @@ -138,6 +149,11 @@ function toVerificationMaterialx509CertificateChain(certificates) { function toVerificationMaterialPublicKey(hint) { return { $case: 'publicKey', publicKey: { hint } }; } +function toTimestampVerificationData(timestamp) { + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; +} function signingCertificate(bundle) { if (!isBundleWithCertificateChain(bundle)) { return undefined; diff --git a/node_modules/sigstore/dist/verify.js b/node_modules/sigstore/dist/verify.js index 9d21b553ac523..49f63d93abb26 100644 --- a/node_modules/sigstore/dist/verify.js +++ b/node_modules/sigstore/dist/verify.js @@ -41,7 +41,9 @@ class Verifier { if (sigstore.isBundleWithCertificateChain(bundle)) { this.verifySigningCertificate(bundle, options); } - this.verifyTLogEntries(bundle, options); + if (options.tlogOptions.disable === false) { + this.verifyTLogEntries(bundle, options); + } } // Performs bundle signature verification. Determines the type of the bundle // content and delegates to the appropriate signature verification function. 
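The sigstore changes above mostly thread new client options through config.js, sign.js and sigstore.js: a per-request retry/timeout pair (defaulting to { retries: 2 } and 5000 ms), a tlogUpload flag that can skip the Rekor entry, and an optional RFC 3161 timestamp authority URL. A minimal usage sketch, assuming only the option names visible in these hunks; the payload type and TSA URL are placeholders, and the OIDC identity setup that attest() also needs is omitted:

const sigstore = require('sigstore')

async function attestAndVerify (payload) {
  // attest() forwards retry/timeout to Fulcio, Rekor and (if configured) the TSA,
  // and can skip the Rekor upload entirely via tlogUpload: false.
  const bundle = await sigstore.attest(payload, 'application/vnd.in-toto+json', {
    retry: { retries: 2 },                    // forwarded to make-fetch-happen
    timeout: 5000,                            // per-request timeout in ms
    tlogUpload: true,                         // set false to omit the tlog entry
    tsaServerURL: 'https://tsa.example.com',  // placeholder RFC 3161 authority
  })

  // verify() now passes retry/timeout through to the TUF client as well.
  await sigstore.verify(bundle, payload, { retry: { retries: 2 }, timeout: 5000 })
  return bundle
}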
diff --git a/node_modules/sigstore/dist/x509/verify.js b/node_modules/sigstore/dist/x509/verify.js index cc34a9ea23abe..b4c7f39912a84 100644 --- a/node_modules/sigstore/dist/x509/verify.js +++ b/node_modules/sigstore/dist/x509/verify.js @@ -24,15 +24,15 @@ function verifyCertificateChain(opts) { exports.verifyCertificateChain = verifyCertificateChain; class CertificateChainVerifier { constructor(opts) { - this.certs = opts.certs; + this.untrustedCert = opts.untrustedCert; this.trustedCerts = opts.trustedCerts; - this.localCerts = dedupeCertificates([...opts.trustedCerts, ...opts.certs]); + this.localCerts = dedupeCertificates([ + ...opts.trustedCerts, + opts.untrustedCert, + ]); this.validAt = opts.validAt || new Date(); } verify() { - if (this.certs.length === 0) { - throw new error_1.VerificationError('No certificates provided'); - } // Construct certificate path from leaf to root const certificatePath = this.sort(); // Perform validation checks on each certificate in the path @@ -41,7 +41,7 @@ class CertificateChainVerifier { return certificatePath; } sort() { - const leafCert = this.localCerts[this.localCerts.length - 1]; + const leafCert = this.untrustedCert; // Construct all possible paths from the leaf let paths = this.buildPaths(leafCert); // Filter for paths which contain a trusted certificate @@ -52,7 +52,9 @@ class CertificateChainVerifier { // Find the shortest of possible paths const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); // Construct chain from shortest path - return [leafCert, ...path]; + // Removes the last certificate in the path, which will be a second copy + // of the root certificate given that the root is self-signed. + return [leafCert, ...path].slice(0, -1); } // Recursively build all possible paths from the leaf to the root buildPaths(certificate) { @@ -123,8 +125,8 @@ class CertificateChainVerifier { return issuers; } checkPath(path) { - if (path.length < 2) { - throw new error_1.VerificationError('Certificate chain must contain at least two certificates'); + if (path.length < 1) { + throw new error_1.VerificationError('Certificate chain must contain at least one certificate'); } // Check that all certificates are valid at the check date const validForDate = path.every((cert) => cert.validForDate(this.validAt)); @@ -143,6 +145,22 @@ class CertificateChainVerifier { throw new error_1.VerificationError('Incorrect certificate name chaining'); } } + // Check pathlength constraints + for (let i = 0; i < path.length; i++) { + const cert = path[i]; + // If the certificate is a CA, check the path length + if (cert.extBasicConstraints?.isCA) { + const pathLength = cert.extBasicConstraints.pathLenConstraint; + // The path length, if set, indicates how many intermediate + // certificates (NOT including the leaf) are allowed to follow. 
The + // pathLength constraint of any intermediate CA certificate MUST be + // greater than or equal to it's own depth in the chain (with an + // adjustment for the leaf certificate) + if (pathLength !== undefined && pathLength < i - 1) { + throw new error_1.VerificationError('Path length constraint exceeded'); + } + } + } } } // Remove duplicate certificates from the array diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json index 2df3467186765..2ca34e2a445ad 100644 --- a/node_modules/sigstore/package.json +++ b/node_modules/sigstore/package.json @@ -1,58 +1,40 @@ { "name": "sigstore", - "version": "1.4.0", + "version": "1.5.2", "description": "code-signing for npm packages", "main": "dist/index.js", "types": "dist/index.d.ts", "scripts": { - "build": "tsc", - "test": "jest", - "test:watch": "jest --watch", - "test:ci": "jest --maxWorkers=2 --coverage", - "lint": "eslint --fix --ext .ts src/**", - "lint:check": "eslint --max-warnings 0 --ext .ts src/**", - "format": "prettier --write \"src/**/*\"", - "release": "npm run build && changeset publish", - "codegen:rekor": "./hack/generate-rekor-types" + "build": "tsc --build", + "test": "jest" }, "bin": { "sigstore": "bin/sigstore.js" }, - "repository": { - "type": "git", - "url": "git+https://github.com/sigstore/sigstore-js.git" - }, - "publishConfig": { - "provenance": true - }, "files": [ "dist", "store" ], "author": "bdehamer@github.com", "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, "bugs": { "url": "https://github.com/sigstore/sigstore-js/issues" }, - "homepage": "https://github.com/sigstore/sigstore-js#readme", + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme", + "publishConfig": { + "provenance": true + }, "devDependencies": { - "@changesets/cli": "^2.26.0", "@total-typescript/shoehorn": "^0.1.0", - "@tsconfig/node14": "^1.0.3", "@tufjs/repo-mock": "^1.1.0", - "@types/jest": "^29.4.0", "@types/make-fetch-happen": "^10.0.0", - "@types/node": "^18.6.5", - "@typescript-eslint/eslint-plugin": "^5.26.0", - "@typescript-eslint/parser": "^5.26.0", - "eslint": "^8.16.0", - "eslint-config-prettier": "^8.5.0", - "eslint-plugin-prettier": "^4.0.0", - "jest": "^29.4.1", - "json-schema-to-typescript": "^12.0.0", + "@types/node": "^20.0.0", + "json-schema-to-typescript": "^13.0.0", "nock": "^13.2.4", - "prettier": "^2.6.2", - "ts-jest": "^29.0.5", "typescript": "^5.0.2" }, "dependencies": { diff --git a/node_modules/sigstore/store/public-good-instance-root.json b/node_modules/sigstore/store/public-good-instance-root.json index 38f80f940473a..e95c7e88cdf09 100644 --- a/node_modules/sigstore/store/public-good-instance-root.json +++ b/node_modules/sigstore/store/public-good-instance-root.json @@ -1,156 +1 @@ -{ - "signed": { - "_type": "root", - "spec_version": "1.0", - "version": 5, - "expires": "2023-04-18T18:13:43Z", - "keys": { - "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n" - } - }, - "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - 
"keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n" - } - }, - "45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n" - } - }, - "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n" - } - }, - "e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n" - } - }, - "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n" - } - }, - "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c": { - "keytype": "ecdsa-sha2-nistp256", - "scheme": "ecdsa-sha2-nistp256", - "keyid_hash_algorithms": [ - "sha256", - "sha512" - ], - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n" - } - } - }, - "roles": { - "root": { - "keyids": [ - "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c", - "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99", - "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f", - "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b", - "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de" - ], - "threshold": 3 - }, - "snapshot": { - "keyids": [ - "45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c", - "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99", - "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f", - "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b", - "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de" - ], - "threshold": 3 - }, - "timestamp": { - "keyids": [ - "e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a" - ], - "threshold": 1 - } - }, - "consistent_snapshot": true - }, - "signatures": [ - { - "keyid": 
"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c", - "sig": "3045022100fc1c2be509ce50ea917bbad1d9efe9d96c8c2ebea04af2717aa3d9c6fe617a75022012eef282a19f2d8bd4818aa333ef48a06489f49d4d34a20b8fe8fc867bb25a7a" - }, - { - "keyid": "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99", - "sig": "30450221008a4392ae5057fc00778b651e61fea244766a4ae58db84d9f1d3810720ab0f3b702207c49e59e8031318caf02252ecea1281cecc1e5986c309a9cef61f455ecf7165d" - }, - { - "keyid": "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b", - "sig": "3046022100da1b8dc5d53aaffbbfac98de3e23ee2d2ad3446a7bed09fac0f88bae19be2587022100b681c046afc3919097dfe794e0d819be891e2e850aade315bec06b0c4dea221b" - }, - { - "keyid": "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de", - "sig": "3046022100b534e0030e1b271133ecfbdf3ba9fbf3becb3689abea079a2150afbb63cdb7c70221008c39a718fd9495f249b4ab8788d5b9dc269f0868dbe38b272f48207359d3ded9" - }, - { - "keyid": "2f64fb5eac0cf94dd39bb45308b98920055e9a0d8e012a7220787834c60aef97", - "sig": "3045022100fc1c2be509ce50ea917bbad1d9efe9d96c8c2ebea04af2717aa3d9c6fe617a75022012eef282a19f2d8bd4818aa333ef48a06489f49d4d34a20b8fe8fc867bb25a7a" - }, - { - "keyid": "eaf22372f417dd618a46f6c627dbc276e9fd30a004fc94f9be946e73f8bd090b", - "sig": "30450221008a4392ae5057fc00778b651e61fea244766a4ae58db84d9f1d3810720ab0f3b702207c49e59e8031318caf02252ecea1281cecc1e5986c309a9cef61f455ecf7165d" - }, - { - "keyid": "f505595165a177a41750a8e864ed1719b1edfccd5a426fd2c0ffda33ce7ff209", - "sig": "3046022100da1b8dc5d53aaffbbfac98de3e23ee2d2ad3446a7bed09fac0f88bae19be2587022100b681c046afc3919097dfe794e0d819be891e2e850aade315bec06b0c4dea221b" - }, - { - "keyid": "75e867ab10e121fdef32094af634707f43ddd79c6bab8ad6c5ab9f03f4ea8c90", - "sig": "3046022100b534e0030e1b271133ecfbdf3ba9fbf3becb3689abea079a2150afbb63cdb7c70221008c39a718fd9495f249b4ab8788d5b9dc269f0868dbe38b272f48207359d3ded9" - } - ] -} \ No newline at end of file +{"signed":{"_type":"root","spec_version":"1.0","version":7,"expires":"2023-10-04T13:08:11Z","keys":{"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"}},"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"}},"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"}},"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC 
KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"}},"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"}},"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"}},"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"}}},"roles":{"root":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"snapshot":{"keyids":["45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"],"threshold":1},"targets":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"timestamp":{"keyids":["e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"],"threshold":1}},"consistent_snapshot":true},"signatures":[{"keyid":"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","sig":"3046022100c0610c0055ce5c4a52d054d7322e7b514d55baf44423d63aa4daa077cc60fd1f022100a097f2803f090fb66c42ead915a2c46ebe7db53a32bf18f2188275cc936f8bdd"},{"keyid":"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","sig":"304502203134f0468810299d5493a867c40630b341296b92e59c29821311d353343bb3a4022100e667ae3d304e7e3da0894c7425f6b9ecd917106841280e5cf6f3496ad5f8f68e"},{"keyid":"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","sig":"3045022037fe5f45426f21eaaf4730d2136f2b1611d6379688f79b9d1e3f61719997135c022100b63b022d7b79d4694b96f416d88aa4d7b1a3bff8a01f4fb51e0f42137c7d2d06"},{"keyid":"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de","sig":"3044022007cc8fcc4940809f2751ad5b535f4c5f53f5b4952f5b5696b09668e743306ac1022006dfcdf94e94c92163eeb1b47796db62cedaa730aa13aa61b573fe23714730f2"}]} diff --git a/package-lock.json b/package-lock.json index 2fde2175fb1ef..692a5871b7678 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11436,9 +11436,9 @@ } }, "node_modules/sigstore": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.4.0.tgz", - "integrity": 
"sha512-N7TRpSbFjY/TrFDg6yGAQSYBrQ5s6qmPiq4pD6fkv1LoyfMsLG0NwZWG2s5q+uttLHgyVyTa0Rogx2P78rN8kQ==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.5.2.tgz", + "integrity": "sha512-X95v6xAAooVpn7PaB94TDmFeSO5SBfCtB1R23fvzr36WTfjtkiiyOeei979nbTjc8nzh6FSLeltQZuODsm1EjQ==", "inBundle": true, "dependencies": { "@sigstore/protobuf-specs": "^0.1.0", From c22f1243db46f21d2c1495001d0447da860bf14d Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:51:24 -0700 Subject: [PATCH 05/11] deps: @npmcli/package-json@3.1.0 --- DEPENDENCIES.md | 9 +- .../@npmcli/package-json/lib/index.js | 157 ++++++++-- .../@npmcli/package-json/lib/normalize.js | 284 ++++++++++++++++++ .../@npmcli/package-json/package.json | 14 +- package-lock.json | 13 +- package.json | 2 +- 6 files changed, 439 insertions(+), 40 deletions(-) create mode 100644 node_modules/@npmcli/package-json/lib/normalize.js diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index 7c9e187f4783f..a7df0ec604f95 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -214,6 +214,8 @@ graph LR; npmcli-mock-registry-->npmcli-template-oss["@npmcli/template-oss"]; npmcli-mock-registry-->pacote; npmcli-package-json-->json-parse-even-better-errors; + npmcli-package-json-->normalize-package-data; + npmcli-package-json-->npm-normalize-package-bin; npmcli-run-script-->npmcli-node-gyp["@npmcli/node-gyp"]; npmcli-run-script-->npmcli-promise-spawn["@npmcli/promise-spawn"]; npmcli-run-script-->read-package-json-fast; @@ -694,7 +696,10 @@ graph LR; npmcli-mock-registry-->tap; npmcli-move-file-->mkdirp; npmcli-move-file-->rimraf; + npmcli-package-json-->glob; npmcli-package-json-->json-parse-even-better-errors; + npmcli-package-json-->normalize-package-data; + npmcli-package-json-->npm-normalize-package-bin; npmcli-promise-spawn-->which; npmcli-query-->postcss-selector-parser; npmcli-run-script-->node-gyp; @@ -817,6 +822,6 @@ packages higher up the chain. 
- pacote, libnpmhook, libnpmorg, libnpmsearch, libnpmteam, npm-profile - npm-registry-fetch, libnpmversion - @npmcli/git, make-fetch-happen, @npmcli/config, init-package-json - - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, @npmcli/run-script, read-package-json, promzard - - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, npm-packlist, normalize-package-data, @npmcli/package-json, bin-links, nopt, npmlog, parse-conflict-json, read + - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, @npmcli/run-script, read-package-json, @npmcli/package-json, promzard + - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, npm-packlist, normalize-package-data, bin-links, nopt, npmlog, parse-conflict-json, read - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, npm-audit-report, npm-user-validate diff --git a/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/package-json/lib/index.js index e98308f3d3b84..34e415b45d49f 100644 --- a/node_modules/@npmcli/package-json/lib/index.js +++ b/node_modules/@npmcli/package-json/lib/index.js @@ -1,18 +1,12 @@ -const fs = require('fs') -const promisify = require('util').promisify -const readFile = promisify(fs.readFile) -const writeFile = promisify(fs.writeFile) +const { readFile, writeFile } = require('fs/promises') const { resolve } = require('path') const updateDeps = require('./update-dependencies.js') const updateScripts = require('./update-scripts.js') const updateWorkspaces = require('./update-workspaces.js') +const normalize = require('./normalize.js') const parseJSON = require('json-parse-even-better-errors') -const _filename = Symbol('filename') -const _manifest = Symbol('manifest') -const _readFileContent = Symbol('readFileContent') - // a list of handy specialized helper functions that take // care of special cases that are handled by the npm cli const knownSteps = new Set([ @@ -29,42 +23,111 @@ const knownKeys = new Set([ ]) class PackageJson { + static normalizeSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'optionalDedupe', + 'scripts', + 'funding', + 'bin', + ]) + + static prepareSteps = Object.freeze([ + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'gypfile', + 'serverjs', + 'scriptpath', + 'authors', + 'readme', + 'mans', + 'binDir', + 'gitHead', + 'fillTypes', + 'normalizeData', + 'binRefs', + ]) + + // default behavior, just loads and parses static async load (path) { return await new PackageJson(path).load() } + // read-package-json compatible behavior + static async prepare (path, opts) { + return await new PackageJson(path).prepare(opts) + } + + // read-package-json-fast compatible behavior + static async normalize (path, opts) { + return await new PackageJson(path).normalize(opts) + } + + #filename + #path + #manifest = {} + #readFileContent = '' + #fromIndex = false + constructor (path) { - this[_filename] = resolve(path, 'package.json') - 
this[_manifest] = {} - this[_readFileContent] = '' + this.#path = path + this.#filename = resolve(path, 'package.json') } - async load () { + async load (parseIndex) { + let parseErr try { - this[_readFileContent] = - await readFile(this[_filename], 'utf8') + this.#readFileContent = + await readFile(this.#filename, 'utf8') } catch (err) { - throw new Error('package.json not found') + err.message = `Could not read package.json: ${err}` + if (!parseIndex) { + throw err + } + parseErr = err + } + + if (parseErr) { + const indexFile = resolve(this.#path, 'index.js') + let indexFileContent + try { + indexFileContent = await readFile(indexFile, 'utf8') + } catch (err) { + throw parseErr + } + try { + this.#manifest = fromComment(indexFileContent) + } catch (err) { + throw parseErr + } + this.#fromIndex = true + return this } try { - this[_manifest] = - parseJSON(this[_readFileContent]) + this.#manifest = parseJSON(this.#readFileContent) } catch (err) { - throw new Error(`Invalid package.json: ${err}`) + err.message = `Invalid package.json: ${err}` + throw err } - return this } get content () { - return this[_manifest] + return this.#manifest + } + + get path () { + return this.#path } update (content) { // validates both current manifest and content param const invalidContent = - typeof this[_manifest] !== 'object' + typeof this.#manifest !== 'object' || typeof content !== 'object' if (invalidContent) { throw Object.assign( @@ -74,13 +137,13 @@ class PackageJson { } for (const step of knownSteps) { - this[_manifest] = step({ content, originalContent: this[_manifest] }) + this.#manifest = step({ content, originalContent: this.#manifest }) } // unknown properties will just be overwitten for (const [key, value] of Object.entries(content)) { if (!knownKeys.has(key)) { - this[_manifest][key] = value + this.#manifest[key] = value } } @@ -88,22 +151,62 @@ class PackageJson { } async save () { + if (this.#fromIndex) { + throw new Error('No package.json to save to') + } const { [Symbol.for('indent')]: indent, [Symbol.for('newline')]: newline, - } = this[_manifest] + } = this.#manifest const format = indent === undefined ? ' ' : indent const eol = newline === undefined ? '\n' : newline const fileContent = `${ - JSON.stringify(this[_manifest], null, format) + JSON.stringify(this.#manifest, null, format) }\n` .replace(/\n/g, eol) - if (fileContent.trim() !== this[_readFileContent].trim()) { - return await writeFile(this[_filename], fileContent) + if (fileContent.trim() !== this.#readFileContent.trim()) { + return await writeFile(this.#filename, fileContent) } } + + async normalize (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.normalizeSteps + } + await this.load() + await normalize(this, opts) + return this + } + + async prepare (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.prepareSteps + } + await this.load(true) + await normalize(this, opts) + return this + } +} + +// /**package { "name": "foo", "version": "1.2.3", ... 
} **/ +function fromComment (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + return parseJSON(data) } module.exports = PackageJson diff --git a/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/package-json/lib/normalize.js new file mode 100644 index 0000000000000..bc101cd4fde1b --- /dev/null +++ b/node_modules/@npmcli/package-json/lib/normalize.js @@ -0,0 +1,284 @@ +const fs = require('fs/promises') +const { glob } = require('glob') +const normalizePackageBin = require('npm-normalize-package-bin') +const normalizePackageData = require('normalize-package-data') +const path = require('path') + +const normalize = async (pkg, { strict, steps }) => { + const data = pkg.content + const scripts = data.scripts || {} + + // remove attributes that start with "_" + if (steps.includes('_attributes')) { + for (const key in data) { + if (key.startsWith('_')) { + delete pkg.content[key] + } + } + } + + // build the "_id" attribute + if (steps.includes('_id')) { + if (data.name && data.version) { + data._id = `${data.name}@${data.version}` + } + } + + // fix bundledDependencies typo + if (steps.includes('bundledDependencies')) { + if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) { + data.bundleDependencies = data.bundledDependencies + } + delete data.bundledDependencies + } + // expand "bundleDependencies: true or translate from object" + if (steps.includes('bundleDependencies')) { + const bd = data.bundleDependencies + if (bd === true) { + data.bundleDependencies = Object.keys(data.dependencies || {}) + } else if (bd && typeof bd === 'object') { + if (!Array.isArray(bd)) { + data.bundleDependencies = Object.keys(bd) + } + } else { + delete data.bundleDependencies + } + } + + // it was once common practice to list deps both in optionalDependencies and + // in dependencies, to support npm versions that did not know about + // optionalDependencies. This is no longer a relevant need, so duplicating + // the deps in two places is unnecessary and excessive. 
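The new `PackageJson` class above keeps the existing `load`/`update`/`save` flow and adds `normalize` and `prepare` entry points that run the step lists defined on the class. A minimal usage sketch, assuming a hypothetical project path; the method and property names come from the diff itself, everything else is illustrative:

```js
const PackageJson = require('@npmcli/package-json')

async function example () {
  // read-package-json-fast compatible: load package.json and run normalizeSteps
  const normalized = await PackageJson.normalize('/path/to/project')
  console.log(normalized.content._id) // "<name>@<version>", added by the "_id" step

  // plain load, edit, save still preserves the file's indent and line endings
  const pkg = await PackageJson.load('/path/to/project')
  pkg.update({ scripts: { ...pkg.content.scripts, test: 'tap' } })
  await pkg.save()
}

example().catch(console.error)
```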
+ if (steps.includes('optionalDedupe')) { + if (data.dependencies && + data.optionalDependencies && typeof data.optionalDependencies === 'object') { + for (const name in data.optionalDependencies) { + delete data.dependencies[name] + } + if (!Object.keys(data.dependencies).length) { + delete data.dependencies + } + } + } + + // add "install" attribute if any "*.gyp" files exist + if (steps.includes('gypfile')) { + if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { + const files = await glob('*.gyp', { cwd: pkg.path }) + if (files.length) { + scripts.install = 'node-gyp rebuild' + data.scripts = scripts + data.gypfile = true + } + } + } + + // add "start" attribute if "server.js" exists + if (steps.includes('serverjs') && !scripts.start) { + try { + await fs.access(path.join(pkg.path, 'server.js')) + scripts.start = 'node server.js' + data.scripts = scripts + } catch { + // do nothing + } + } + + // strip "node_modules/.bin" from scripts entries + if (steps.includes('scripts') || steps.includes('scriptpath')) { + const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (typeof data.scripts === 'object') { + for (const name in data.scripts) { + if (typeof data.scripts[name] !== 'string') { + delete data.scripts[name] + } else if (steps.includes('scriptpath')) { + data.scripts[name] = data.scripts[name].replace(spre, '') + } + } + } else { + delete data.scripts + } + } + + if (steps.includes('funding')) { + if (data.funding && typeof data.funding === 'string') { + data.funding = { url: data.funding } + } + } + + // populate "authors" attribute + if (steps.includes('authors') && !data.contributors) { + try { + const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8') + const authors = authorData.split(/\r?\n/g) + .map(line => line.replace(/^\s*#.*$/, '').trim()) + .filter(line => line) + data.contributors = authors + } catch { + // do nothing + } + } + + // populate "readme" attribute + if (steps.includes('readme') && !data.readme) { + const mdre = /\.m?a?r?k?d?o?w?n?$/i + const files = await glob('{README,README.*}', { cwd: pkg.path, nocase: true, mark: true }) + let readmeFile + for (const file of files) { + // don't accept directories. 
+ if (!file.endsWith(path.sep)) { + if (file.match(mdre)) { + readmeFile = file + break + } + if (file.endsWith('README')) { + readmeFile = file + } + } + } + if (readmeFile) { + const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8') + data.readme = readmeData + data.readmeFilename = readmeFile + } + } + + // expand directories.man + if (steps.includes('mans') && !data.man && data.directories?.man) { + const manDir = data.directories.man + const cwd = path.resolve(pkg.path, manDir) + const files = await glob('**/*.[0-9]', { cwd }) + data.man = files.map(man => + path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/') + ) + } + + if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { + normalizePackageBin(data) + } + + // expand "directories.bin" + if (steps.includes('binDir') && data.directories?.bin) { + const binsDir = path.resolve(pkg.path, path.join('.', path.join('/', data.directories.bin))) + const bins = await glob('**', { cwd: binsDir }) + data.bin = bins.reduce((acc, binFile) => { + if (binFile && !binFile.startsWith('.')) { + const binName = path.basename(binFile) + acc[binName] = path.join(data.directories.bin, binFile) + } + return acc + }, {}) + // *sigh* + normalizePackageBin(data) + } + + // populate "gitHead" attribute + if (steps.includes('gitHead') && !data.gitHead) { + let head + try { + head = await fs.readFile(path.resolve(pkg.path, '.git/HEAD'), 'utf8') + } catch (err) { + // do nothing + } + let headData + if (head) { + if (head.startsWith('ref: ')) { + const headRef = head.replace(/^ref: /, '').trim() + const headFile = path.resolve(pkg.path, '.git', headRef) + try { + headData = await fs.readFile(headFile, 'utf8') + headData = headData.replace(/^ref: /, '').trim() + } catch (err) { + // do nothing + } + if (!headData) { + const packFile = path.resolve(pkg.path, '.git/packed-refs') + try { + let refs = await fs.readFile(packFile, 'utf8') + if (refs) { + refs = refs.split('\n') + for (let i = 0; i < refs.length; i++) { + const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { + headData = match[1] + break + } + } + } + } catch { + // do nothing + } + } + } else { + headData = head.trim() + } + } + if (headData) { + data.gitHead = headData + } + } + + // populate "types" attribute + if (steps.includes('fillTypes')) { + const index = data.main || 'index.js' + + if (typeof index !== 'string') { + throw new TypeError('The "main" attribute must be of type string.') + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + const extless = path.join(path.dirname(index), path.basename(index, path.extname(index))) + const dts = `./${extless}.d.ts` + const hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields) { + try { + await fs.access(path.join(pkg.path, dts)) + data.types = dts.split(path.sep).join('/') + } catch { + // do nothing + } + } + } + + // "normalizeData" from read-package-json + if (steps.includes('normalizeData')) { + normalizePackageData(data, strict) + } + + // Warn 
if the bin references don't point to anything. This might be better + // in normalize-package-data if it had access to the file path. + if (steps.includes('binRefs') && data.bin instanceof Object) { + for (const key in data.bin) { + const binPath = path.resolve(pkg.path, data.bin[key]) + try { + await fs.access(binPath) + } catch { + delete data.bin[key] + } + } + } +} + +module.exports = normalize diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json index faae7891a1e72..61607c5bb6ae7 100644 --- a/node_modules/@npmcli/package-json/package.json +++ b/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "3.0.0", + "version": "3.1.0", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ @@ -24,12 +24,15 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.1" }, "dependencies": { - "json-parse-even-better-errors": "^3.0.0" + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^5.0.0", + "npm-normalize-package-bin": "^3.0.1" }, "repository": { "type": "git", @@ -40,7 +43,8 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.15.1", + "publish": "true" }, "tap": { "nyc-arg": [ diff --git a/package-lock.json b/package-lock.json index 692a5871b7678..b0dab2437e66d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -87,7 +87,7 @@ "@npmcli/arborist": "^6.2.9", "@npmcli/config": "^6.1.6", "@npmcli/map-workspaces": "^3.0.4", - "@npmcli/package-json": "^3.0.0", + "@npmcli/package-json": "^3.1.0", "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", @@ -2311,12 +2311,15 @@ } }, "node_modules/@npmcli/package-json": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-3.0.0.tgz", - "integrity": "sha512-NnuPuM97xfiCpbTEJYtEuKz6CFbpUHtaT0+5via5pQeI25omvQDFbp1GcGJ/c4zvL/WX0qbde6YiLgfZbWFgvg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-3.1.0.tgz", + "integrity": "sha512-qNPy6Yf9ruFST99xcrl5EWAvrb7qFrwgVbwdzcTJlIgxbArKOq5e/bgZ6rTL1X9hDgAdPbvL8RWx/OTLSB0ToA==", "inBundle": true, "dependencies": { - "json-parse-even-better-errors": "^3.0.0" + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^5.0.0", + "npm-normalize-package-bin": "^3.0.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/package.json b/package.json index 452184aba643a..5faef77f95891 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "@npmcli/arborist": "^6.2.9", "@npmcli/config": "^6.1.6", "@npmcli/map-workspaces": "^3.0.4", - "@npmcli/package-json": "^3.0.0", + "@npmcli/package-json": "^3.1.0", "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", From 1cec130cb3a8e8927e53a4dd8f6a845ec5dae63e Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:55:33 -0700 Subject: [PATCH 06/11] deps: signal-exit@4.0.2 --- node_modules/signal-exit/dist/cjs/index.js | 1 - node_modules/signal-exit/dist/mjs/index.js | 1 - node_modules/signal-exit/package.json | 2 +- package-lock.json | 6 +++--- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git 
a/node_modules/signal-exit/dist/cjs/index.js b/node_modules/signal-exit/dist/cjs/index.js index aef98326598c2..9312d0c96e879 100644 --- a/node_modules/signal-exit/dist/cjs/index.js +++ b/node_modules/signal-exit/dist/cjs/index.js @@ -34,7 +34,6 @@ class Emitter { id = Math.random(); constructor() { if (global[kExitEmitter]) { - console.error('reusing global emitter'); return global[kExitEmitter]; } ObjectDefineProperty(global, kExitEmitter, { diff --git a/node_modules/signal-exit/dist/mjs/index.js b/node_modules/signal-exit/dist/mjs/index.js index b9d1b569b733f..97f409239c8f3 100644 --- a/node_modules/signal-exit/dist/mjs/index.js +++ b/node_modules/signal-exit/dist/mjs/index.js @@ -30,7 +30,6 @@ class Emitter { id = Math.random(); constructor() { if (global[kExitEmitter]) { - console.error('reusing global emitter'); return global[kExitEmitter]; } ObjectDefineProperty(global, kExitEmitter, { diff --git a/node_modules/signal-exit/package.json b/node_modules/signal-exit/package.json index 5e7e3a74d95d8..455452f96a0b3 100644 --- a/node_modules/signal-exit/package.json +++ b/node_modules/signal-exit/package.json @@ -1,6 +1,6 @@ { "name": "signal-exit", - "version": "4.0.1", + "version": "4.0.2", "description": "when you want to fire an event no matter how a process exits.", "main": "./dist/cjs/index.js", "module": "./dist/mjs/index.js", diff --git a/package-lock.json b/package-lock.json index b0dab2437e66d..ca66cbcf886f4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11427,9 +11427,9 @@ } }, "node_modules/signal-exit": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.0.1.tgz", - "integrity": "sha512-uUWsN4aOxJAS8KOuf3QMyFtgm1pkb6I+KRZbRF/ghdf5T7sM+B1lLLzPDxswUjkmHyxQAVzEgG35E3NzDM9GVw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.0.2.tgz", + "integrity": "sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q==", "inBundle": true, "engines": { "node": ">=14" From 894e9dc17946791f316fbb67dcc79d2ae4402892 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:57:02 -0700 Subject: [PATCH 07/11] deps: path-scurry@1.9.1 --- node_modules/path-scurry/dist/cjs/index.js | 49 ++++++++++++++++++++-- node_modules/path-scurry/dist/mjs/index.js | 49 ++++++++++++++++++++-- node_modules/path-scurry/package.json | 8 ++-- package-lock.json | 10 ++--- 4 files changed, 99 insertions(+), 17 deletions(-) diff --git a/node_modules/path-scurry/dist/cjs/index.js b/node_modules/path-scurry/dist/cjs/index.js index d209f0e833049..8044c7e581d2e 100644 --- a/node_modules/path-scurry/dist/cjs/index.js +++ b/node_modules/path-scurry/dist/cjs/index.js @@ -159,6 +159,7 @@ class ChildrenCache extends lru_cache_1.LRUCache { } } exports.ChildrenCache = ChildrenCache; +const setAsCwd = Symbol('PathScurry setAsCwd'); /** * Path objects are sort of like a super-powered * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent} @@ -291,6 +292,16 @@ class PathBase { #children; #linkTarget; #realpath; + /** + * This property is for compatibility with the Dirent class as of + * Node v20, where Dirent['path'] refers to the path of the directory + * that was passed to readdir. So, somewhat counterintuitively, this + * property refers to the *parent* path, not the path object itself. + * For root entries, it's the path to the entry itself. + */ + get path() { + return (this.parent || this).fullpath(); + } /** * Do not create new Path objects directly. 
They should always be accessed * via the PathScurry class or other methods on the Path class. @@ -438,8 +449,7 @@ class PathBase { return (this.#relative = this.name); } const pv = p.relative(); - const rp = pv + (!pv || !p.parent ? '' : this.sep) + name; - return (this.#relative = rp); + return pv + (!pv || !p.parent ? '' : this.sep) + name; } /** * The relative path from the cwd, using / as the path separator. @@ -458,8 +468,7 @@ class PathBase { return (this.#relativePosix = this.fullpathPosix()); } const pv = p.relativePosix(); - const rp = pv + (!pv || !p.parent ? '' : '/') + name; - return (this.#relativePosix = rp); + return pv + (!pv || !p.parent ? '' : '/') + name; } /** * The fully resolved path string for this Path entry @@ -1111,6 +1120,33 @@ class PathBase { this.#markENOREALPATH(); } } + /** + * Internal method to mark this Path object as the scurry cwd, + * called by {@link PathScurry#chdir} + * + * @internal + */ + [setAsCwd](oldCwd) { + if (oldCwd === this) + return; + const changed = new Set([]); + let rp = []; + let p = this; + while (p && p.parent) { + changed.add(p); + p.#relative = rp.join(this.sep); + p.#relativePosix = rp.join('/'); + p = p.parent; + rp.push('..'); + } + // now un-memoize parents of old cwd + p = oldCwd; + while (p && p.parent && !changed.has(p)) { + p.#relative = undefined; + p.#relativePosix = undefined; + p = p.parent; + } + } } exports.PathBase = PathBase; /** @@ -1838,6 +1874,11 @@ class PathScurryBase { process(); return results; } + chdir(path = this.cwd) { + const oldCwd = this.cwd; + this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path; + this.cwd[setAsCwd](oldCwd); + } } exports.PathScurryBase = PathScurryBase; /** diff --git a/node_modules/path-scurry/dist/mjs/index.js b/node_modules/path-scurry/dist/mjs/index.js index 8490cf73124f3..957f087c86514 100644 --- a/node_modules/path-scurry/dist/mjs/index.js +++ b/node_modules/path-scurry/dist/mjs/index.js @@ -131,6 +131,7 @@ export class ChildrenCache extends LRUCache { }); } } +const setAsCwd = Symbol('PathScurry setAsCwd'); /** * Path objects are sort of like a super-powered * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent} @@ -263,6 +264,16 @@ export class PathBase { #children; #linkTarget; #realpath; + /** + * This property is for compatibility with the Dirent class as of + * Node v20, where Dirent['path'] refers to the path of the directory + * that was passed to readdir. So, somewhat counterintuitively, this + * property refers to the *parent* path, not the path object itself. + * For root entries, it's the path to the entry itself. + */ + get path() { + return (this.parent || this).fullpath(); + } /** * Do not create new Path objects directly. They should always be accessed * via the PathScurry class or other methods on the Path class. @@ -410,8 +421,7 @@ export class PathBase { return (this.#relative = this.name); } const pv = p.relative(); - const rp = pv + (!pv || !p.parent ? '' : this.sep) + name; - return (this.#relative = rp); + return pv + (!pv || !p.parent ? '' : this.sep) + name; } /** * The relative path from the cwd, using / as the path separator. @@ -430,8 +440,7 @@ export class PathBase { return (this.#relativePosix = this.fullpathPosix()); } const pv = p.relativePosix(); - const rp = pv + (!pv || !p.parent ? '' : '/') + name; - return (this.#relativePosix = rp); + return pv + (!pv || !p.parent ? 
'' : '/') + name; } /** * The fully resolved path string for this Path entry @@ -1083,6 +1092,33 @@ export class PathBase { this.#markENOREALPATH(); } } + /** + * Internal method to mark this Path object as the scurry cwd, + * called by {@link PathScurry#chdir} + * + * @internal + */ + [setAsCwd](oldCwd) { + if (oldCwd === this) + return; + const changed = new Set([]); + let rp = []; + let p = this; + while (p && p.parent) { + changed.add(p); + p.#relative = rp.join(this.sep); + p.#relativePosix = rp.join('/'); + p = p.parent; + rp.push('..'); + } + // now un-memoize parents of old cwd + p = oldCwd; + while (p && p.parent && !changed.has(p)) { + p.#relative = undefined; + p.#relativePosix = undefined; + p = p.parent; + } + } } /** * Path class used on win32 systems @@ -1807,6 +1843,11 @@ export class PathScurryBase { process(); return results; } + chdir(path = this.cwd) { + const oldCwd = this.cwd; + this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path; + this.cwd[setAsCwd](oldCwd); + } } /** * Windows implementation of {@link PathScurryBase} diff --git a/node_modules/path-scurry/package.json b/node_modules/path-scurry/package.json index bb282b966c53c..677bf1ce9b6e5 100644 --- a/node_modules/path-scurry/package.json +++ b/node_modules/path-scurry/package.json @@ -1,6 +1,6 @@ { "name": "path-scurry", - "version": "1.7.0", + "version": "1.9.1", "description": "walk paths fast and efficiently", "author": "Isaac Z. Schlueter (https://blog.izs.me)", "main": "./dist/cjs/index.js", @@ -58,7 +58,7 @@ }, "devDependencies": { "@nodelib/fs.walk": "^1.2.8", - "@types/node": "^18.11.18", + "@types/node": "^20.1.4", "@types/tap": "^15.0.7", "c8": "^7.12.0", "eslint-config-prettier": "^8.6.0", @@ -81,7 +81,7 @@ "url": "git+https://github.com/isaacs/path-walker" }, "dependencies": { - "lru-cache": "^9.0.0", - "minipass": "^5.0.0" + "lru-cache": "^9.1.1", + "minipass": "^5.0.0 || ^6.0.0" } } diff --git a/package-lock.json b/package-lock.json index ca66cbcf886f4..b89b581290916 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10269,13 +10269,13 @@ "dev": true }, "node_modules/path-scurry": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.7.0.tgz", - "integrity": "sha512-UkZUeDjczjYRE495+9thsgcVgsaCPkaw80slmfVFgllxY+IO8ubTsOpFVjDPROBqJdHfVPUFRHPBV/WciOVfWg==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.1.tgz", + "integrity": "sha512-UgmoiySyjFxP6tscZDgWGEAgsW5ok8W3F5CJDnnH2pozwSTGE6eH7vwTotMwATWA2r5xqdkKdxYPkwlJjAI/3g==", "inBundle": true, "dependencies": { - "lru-cache": "^9.0.0", - "minipass": "^5.0.0" + "lru-cache": "^9.1.1", + "minipass": "^5.0.0 || ^6.0.0" }, "engines": { "node": ">=16 || 14 >=14.17" From 4bee0d74e9dc6b09a2e70a47d2d0a21ee522e7f8 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:58:14 -0700 Subject: [PATCH 08/11] deps: postcss-selector-parser@6.0.13 --- node_modules/postcss-selector-parser/API.md | 7 ++----- node_modules/postcss-selector-parser/dist/parser.js | 4 ++++ node_modules/postcss-selector-parser/package.json | 2 +- package-lock.json | 6 +++--- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/node_modules/postcss-selector-parser/API.md b/node_modules/postcss-selector-parser/API.md index 36c7298fc9753..64459e3376fba 100644 --- a/node_modules/postcss-selector-parser/API.md +++ b/node_modules/postcss-selector-parser/API.md @@ -278,16 +278,13 @@ if (node.type === 'id') { } ``` -### `node.clone()` +### `node.clone([opts])` Returns a copy of a 
node, detached from any parent containers that the original might have had. ```js -const cloned = parser.id({value: 'search'}); -String(cloned); - -// => #search +const cloned = node.clone(); ``` ### `node.isAtPosition(line, column)` diff --git a/node_modules/postcss-selector-parser/dist/parser.js b/node_modules/postcss-selector-parser/dist/parser.js index 2a1e72c6c6c00..b4c75e3edc3fe 100644 --- a/node_modules/postcss-selector-parser/dist/parser.js +++ b/node_modules/postcss-selector-parser/dist/parser.js @@ -609,6 +609,9 @@ var Parser = /*#__PURE__*/function () { _proto.unexpected = function unexpected() { return this.error("Unexpected '" + this.content() + "'. Escaping special characters with \\ may help.", this.currToken[_tokenize.FIELDS.START_POS]); }; + _proto.unexpectedPipe = function unexpectedPipe() { + return this.error("Unexpected '|'.", this.currToken[_tokenize.FIELDS.START_POS]); + }; _proto.namespace = function namespace() { var before = this.prevToken && this.content(this.prevToken) || true; if (this.nextToken[_tokenize.FIELDS.TYPE] === tokens.word) { @@ -618,6 +621,7 @@ var Parser = /*#__PURE__*/function () { this.position++; return this.universal(before); } + this.unexpectedPipe(); }; _proto.nesting = function nesting() { if (this.nextToken) { diff --git a/node_modules/postcss-selector-parser/package.json b/node_modules/postcss-selector-parser/package.json index ff9c40960f737..dce071cdcb2b3 100644 --- a/node_modules/postcss-selector-parser/package.json +++ b/node_modules/postcss-selector-parser/package.json @@ -1,6 +1,6 @@ { "name": "postcss-selector-parser", - "version": "6.0.12", + "version": "6.0.13", "devDependencies": { "@babel/cli": "^7.11.6", "@babel/core": "^7.11.6", diff --git a/package-lock.json b/package-lock.json index b89b581290916..6086216ff2f11 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10400,9 +10400,9 @@ "dev": true }, "node_modules/postcss-selector-parser": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.12.tgz", - "integrity": "sha512-NdxGCAZdRrwVI1sy59+Wzrh+pMMHxapGnpfenDVlMEXoOcvt4pGE0JLK9YY2F5dLxcFYA/YbVQKhcGU+FtSYQg==", + "version": "6.0.13", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz", + "integrity": "sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" From 58f7ea8ae0764b6919c83b76a435e0b595a094e4 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 09:59:24 -0700 Subject: [PATCH 09/11] deps: readable-stream@4.4.0 --- .../lib/internal/streams/add-abort-signal.js | 30 ++-- .../lib/internal/streams/compose.js | 143 +++++++++++++----- .../lib/internal/streams/destroy.js | 19 ++- .../lib/internal/streams/duplexify.js | 2 - .../lib/internal/streams/end-of-stream.js | 81 ++++++++-- .../lib/internal/streams/operators.js | 25 ++- .../lib/internal/streams/pipeline.js | 113 ++++++++++++-- .../lib/internal/streams/utils.js | 30 +++- .../lib/internal/validators.js | 119 ++++++++++++++- .../readable-stream/lib/ours/primordials.js | 1 + .../readable-stream/lib/stream/promises.js | 11 +- node_modules/readable-stream/package.json | 2 +- package-lock.json | 6 +- 13 files changed, 481 insertions(+), 101 deletions(-) diff --git a/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js b/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js index c6ba8b9c298f1..3a26a1d3e6d76 
100644 --- a/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js +++ b/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js @@ -1,6 +1,7 @@ 'use strict' const { AbortError, codes } = require('../../ours/errors') +const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils') const eos = require('./end-of-stream') const { ERR_INVALID_ARG_TYPE } = codes @@ -12,13 +13,10 @@ const validateAbortSignal = (signal, name) => { throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) } } -function isNodeStream(obj) { - return !!(obj && typeof obj.pipe === 'function') -} module.exports.addAbortSignal = function addAbortSignal(signal, stream) { validateAbortSignal(signal, 'signal') - if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', 'stream.Stream', stream) + if (!isNodeStream(stream) && !isWebStream(stream)) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) } return module.exports.addAbortSignalNoValidate(signal, stream) } @@ -26,13 +24,21 @@ module.exports.addAbortSignalNoValidate = function (signal, stream) { if (typeof signal !== 'object' || !('aborted' in signal)) { return stream } - const onAbort = () => { - stream.destroy( - new AbortError(undefined, { - cause: signal.reason - }) - ) - } + const onAbort = isNodeStream(stream) + ? () => { + stream.destroy( + new AbortError(undefined, { + cause: signal.reason + }) + ) + } + : () => { + stream[kControllerErrorFunction]( + new AbortError(undefined, { + cause: signal.reason + }) + ) + } if (signal.aborted) { onAbort() } else { diff --git a/node_modules/readable-stream/lib/internal/streams/compose.js b/node_modules/readable-stream/lib/internal/streams/compose.js index 4a00aead883c2..f565c12ef3620 100644 --- a/node_modules/readable-stream/lib/internal/streams/compose.js +++ b/node_modules/readable-stream/lib/internal/streams/compose.js @@ -3,11 +3,20 @@ const { pipeline } = require('./pipeline') const Duplex = require('./duplex') const { destroyer } = require('./destroy') -const { isNodeStream, isReadable, isWritable } = require('./utils') +const { + isNodeStream, + isReadable, + isWritable, + isWebStream, + isTransformStream, + isWritableStream, + isReadableStream +} = require('./utils') const { AbortError, codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } } = require('../../ours/errors') +const eos = require('./end-of-stream') module.exports = function compose(...streams) { if (streams.length === 0) { throw new ERR_MISSING_ARGS('streams') @@ -24,14 +33,17 @@ module.exports = function compose(...streams) { streams[idx] = Duplex.from(streams[idx]) } for (let n = 0; n < streams.length; ++n) { - if (!isNodeStream(streams[n])) { + if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) { // TODO(ronag): Add checks for non streams. 
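The `isWebStream`/`isTransformStream` checks above are what let `compose()` accept WHATWG streams next to Node streams. A rough sketch, assuming a Node 18+ runtime with a global `TransformStream` and that this package re-exports `compose` the way `node:stream` does; the stream contents are illustrative:

```js
const { compose, Readable } = require('readable-stream')

// a web TransformStream sandwiched between Node-style streams
const upper = new TransformStream({
  transform (chunk, controller) {
    controller.enqueue(String(chunk).toUpperCase())
  },
})

const composed = compose(Readable.from(['a', 'b', 'c']), upper)

;(async () => {
  for await (const chunk of composed) {
    console.log(chunk.toString()) // "A", "B", "C"
  }
})()
```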
continue } - if (n < streams.length - 1 && !isReadable(streams[n])) { + if ( + n < streams.length - 1 && + !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n])) + ) { throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable') } - if (n > 0 && !isWritable(streams[n])) { + if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) { throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable') } } @@ -53,8 +65,8 @@ module.exports = function compose(...streams) { } const head = streams[0] const tail = pipeline(streams, onfinished) - const writable = !!isWritable(head) - const readable = !!isReadable(tail) + const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head)) + const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail)) // TODO(ronag): Avoid double buffering. // Implement Writable/Readable/Duplex traits. @@ -67,25 +79,49 @@ module.exports = function compose(...streams) { readable }) if (writable) { - d._write = function (chunk, encoding, callback) { - if (head.write(chunk, encoding)) { - callback() - } else { - ondrain = callback + if (isNodeStream(head)) { + d._write = function (chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback() + } else { + ondrain = callback + } } - } - d._final = function (callback) { - head.end() - onfinish = callback - } - head.on('drain', function () { - if (ondrain) { - const cb = ondrain - ondrain = null - cb() + d._final = function (callback) { + head.end() + onfinish = callback } - }) - tail.on('finish', function () { + head.on('drain', function () { + if (ondrain) { + const cb = ondrain + ondrain = null + cb() + } + }) + } else if (isWebStream(head)) { + const writable = isTransformStream(head) ? head.writable : head + const writer = writable.getWriter() + d._write = async function (chunk, encoding, callback) { + try { + await writer.ready + writer.write(chunk).catch(() => {}) + callback() + } catch (err) { + callback(err) + } + } + d._final = async function (callback) { + try { + await writer.ready + writer.close().catch(() => {}) + onfinish = callback + } catch (err) { + callback(err) + } + } + } + const toRead = isTransformStream(tail) ? tail.readable : tail + eos(toRead, () => { if (onfinish) { const cb = onfinish onfinish = null @@ -94,25 +130,46 @@ module.exports = function compose(...streams) { }) } if (readable) { - tail.on('readable', function () { - if (onreadable) { - const cb = onreadable - onreadable = null - cb() - } - }) - tail.on('end', function () { - d.push(null) - }) - d._read = function () { - while (true) { - const buf = tail.read() - if (buf === null) { - onreadable = d._read - return + if (isNodeStream(tail)) { + tail.on('readable', function () { + if (onreadable) { + const cb = onreadable + onreadable = null + cb() + } + }) + tail.on('end', function () { + d.push(null) + }) + d._read = function () { + while (true) { + const buf = tail.read() + if (buf === null) { + onreadable = d._read + return + } + if (!d.push(buf)) { + return + } } - if (!d.push(buf)) { - return + } + } else if (isWebStream(tail)) { + const readable = isTransformStream(tail) ? 
tail.readable : tail + const reader = readable.getReader() + d._read = async function () { + while (true) { + try { + const { value, done } = await reader.read() + if (!d.push(value)) { + return + } + if (done) { + d.push(null) + return + } + } catch { + return + } } } } @@ -128,7 +185,9 @@ module.exports = function compose(...streams) { callback(err) } else { onclose = callback - destroyer(tail, err) + if (isNodeStream(tail)) { + destroyer(tail, err) + } } } return d diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js index 768f2d79d3a89..db76c29f94bab 100644 --- a/node_modules/readable-stream/lib/internal/streams/destroy.js +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -36,7 +36,7 @@ function destroy(err, cb) { const w = this._writableState // With duplex streams we use the writable side for state. const s = w || r - if ((w && w.destroyed) || (r && r.destroyed)) { + if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { if (typeof cb === 'function') { cb() } @@ -107,14 +107,14 @@ function emitCloseNT(self) { if (r) { r.closeEmitted = true } - if ((w && w.emitClose) || (r && r.emitClose)) { + if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) { self.emit('close') } } function emitErrorNT(self, err) { const r = self._readableState const w = self._writableState - if ((w && w.errorEmitted) || (r && r.errorEmitted)) { + if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) { return } if (w) { @@ -162,10 +162,11 @@ function errorOrDestroy(stream, err, sync) { const r = stream._readableState const w = stream._writableState - if ((w && w.destroyed) || (r && r.destroyed)) { + if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { return this } - if ((r && r.autoDestroy) || (w && w.autoDestroy)) stream.destroy(err) + if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy)) + stream.destroy(err) else if (err) { // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 err.stack // eslint-disable-line no-unused-expressions @@ -228,16 +229,18 @@ function constructNT(stream) { } } try { - stream._construct(onConstruct) + stream._construct((err) => { + process.nextTick(onConstruct, err) + }) } catch (err) { - onConstruct(err) + process.nextTick(onConstruct, err) } } function emitConstructNT(stream) { stream.emit(kConstruct) } function isRequest(stream) { - return stream && stream.setHeader && typeof stream.abort === 'function' + return (stream === null || stream === undefined ? 
undefined : stream.setHeader) && typeof stream.abort === 'function' } function emitCloseLegacy(stream) { stream.emit('close') diff --git a/node_modules/readable-stream/lib/internal/streams/duplexify.js b/node_modules/readable-stream/lib/internal/streams/duplexify.js index 43300ddc8a45b..599fb47ab53c2 100644 --- a/node_modules/readable-stream/lib/internal/streams/duplexify.js +++ b/node_modules/readable-stream/lib/internal/streams/duplexify.js @@ -282,8 +282,6 @@ function _duplexify(pair) { cb(err) } else if (err) { d.destroy(err) - } else if (!readable && !writable) { - d.destroy() } } diff --git a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js index 57dbaa48a3ca5..043c9c4bdac51 100644 --- a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js +++ b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -10,20 +10,23 @@ const process = require('process/') const { AbortError, codes } = require('../../ours/errors') const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes const { kEmptyObject, once } = require('../../ours/util') -const { validateAbortSignal, validateFunction, validateObject } = require('../validators') -const { Promise } = require('../../ours/primordials') +const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators') +const { Promise, PromisePrototypeThen } = require('../../ours/primordials') const { isClosed, isReadable, isReadableNodeStream, + isReadableStream, isReadableFinished, isReadableErrored, isWritable, isWritableNodeStream, + isWritableStream, isWritableFinished, isWritableErrored, isNodeStream, - willEmitClose: _willEmitClose + willEmitClose: _willEmitClose, + kIsClosedPromise } = require('./utils') function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function' @@ -42,6 +45,12 @@ function eos(stream, options, callback) { validateFunction(callback, 'callback') validateAbortSignal(options.signal, 'options.signal') callback = once(callback) + if (isReadableStream(stream) || isWritableStream(stream)) { + return eosWeb(stream, options, callback) + } + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) + } const readable = (_options$readable = options.readable) !== null && _options$readable !== undefined ? _options$readable @@ -50,10 +59,6 @@ function eos(stream, options, callback) { (_options$writable = options.writable) !== null && _options$writable !== undefined ? _options$writable : isWritableNodeStream(stream) - if (!isNodeStream(stream)) { - // TODO: Webstreams. 
- throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream) - } const wState = stream._writableState const rState = stream._readableState const onlegacyfinish = () => { @@ -117,6 +122,14 @@ function eos(stream, options, callback) { } callback.call(stream) } + const onclosed = () => { + closed = true + const errored = isWritableErrored(stream) || isReadableErrored(stream) + if (errored && typeof errored !== 'boolean') { + return callback.call(stream, errored) + } + callback.call(stream) + } const onrequest = () => { stream.req.on('finish', onfinish) } @@ -153,22 +166,22 @@ function eos(stream, options, callback) { (rState !== null && rState !== undefined && rState.errorEmitted) ) { if (!willEmitClose) { - process.nextTick(onclose) + process.nextTick(onclosed) } } else if ( !readable && (!willEmitClose || isReadable(stream)) && (writableFinished || isWritable(stream) === false) ) { - process.nextTick(onclose) + process.nextTick(onclosed) } else if ( !writable && (!willEmitClose || isWritable(stream)) && (readableFinished || isReadable(stream) === false) ) { - process.nextTick(onclose) + process.nextTick(onclosed) } else if (rState && stream.req && stream.aborted) { - process.nextTick(onclose) + process.nextTick(onclosed) } const cleanup = () => { callback = nop @@ -209,9 +222,53 @@ function eos(stream, options, callback) { } return cleanup } +function eosWeb(stream, options, callback) { + let isAborted = false + let abort = nop + if (options.signal) { + abort = () => { + isAborted = true + callback.call( + stream, + new AbortError(undefined, { + cause: options.signal.reason + }) + ) + } + if (options.signal.aborted) { + process.nextTick(abort) + } else { + const originalCallback = callback + callback = once((...args) => { + options.signal.removeEventListener('abort', abort) + originalCallback.apply(stream, args) + }) + options.signal.addEventListener('abort', abort) + } + } + const resolverFn = (...args) => { + if (!isAborted) { + process.nextTick(() => callback.apply(stream, args)) + } + } + PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn) + return nop +} function finished(stream, opts) { + var _opts + let autoCleanup = false + if (opts === null) { + opts = kEmptyObject + } + if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) { + validateBoolean(opts.cleanup, 'cleanup') + autoCleanup = opts.cleanup + } return new Promise((resolve, reject) => { - eos(stream, opts, (err) => { + const cleanup = eos(stream, opts, (err) => { + if (autoCleanup) { + cleanup() + } if (err) { reject(err) } else { diff --git a/node_modules/readable-stream/lib/internal/streams/operators.js b/node_modules/readable-stream/lib/internal/streams/operators.js index 323a74a17c32e..869cacb39faca 100644 --- a/node_modules/readable-stream/lib/internal/streams/operators.js +++ b/node_modules/readable-stream/lib/internal/streams/operators.js @@ -2,12 +2,15 @@ const AbortController = globalThis.AbortController || require('abort-controller').AbortController const { - codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, + codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, AbortError } = require('../../ours/errors') const { validateAbortSignal, validateInteger, validateObject } = require('../validators') const kWeakHandler = require('../../ours/primordials').Symbol('kWeak') const { finished } = require('./end-of-stream') +const staticCompose = require('./compose') +const { addAbortSignalNoValidate } = 
require('./add-abort-signal') +const { isWritable, isNodeStream } = require('./utils') const { ArrayPrototypePush, MathFloor, @@ -20,6 +23,23 @@ const { } = require('../../ours/primordials') const kEmpty = Symbol('kEmpty') const kEof = Symbol('kEof') +function compose(stream, options) { + if (options != null) { + validateObject(options, 'options') + } + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') + } + if (isNodeStream(stream) && !isWritable(stream)) { + throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable') + } + const composedStream = staticCompose(this, stream) + if (options !== null && options !== undefined && options.signal) { + // Not validating as we already validated before + addAbortSignalNoValidate(options.signal, composedStream) + } + return composedStream +} function map(fn, options) { if (typeof fn !== 'function') { throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) @@ -424,7 +444,8 @@ module.exports.streamReturningOperators = { filter, flatMap, map, - take + take, + compose } module.exports.promiseReturningOperators = { every, diff --git a/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/readable-stream/lib/internal/streams/pipeline.js index 016e96ee6ff24..8393ba5146991 100644 --- a/node_modules/readable-stream/lib/internal/streams/pipeline.js +++ b/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -24,7 +24,16 @@ const { AbortError } = require('../../ours/errors') const { validateFunction, validateAbortSignal } = require('../validators') -const { isIterable, isReadable, isReadableNodeStream, isNodeStream } = require('./utils') +const { + isIterable, + isReadable, + isReadableNodeStream, + isNodeStream, + isTransformStream, + isWebStream, + isReadableStream, + isReadableEnded +} = require('./utils') const AbortController = globalThis.AbortController || require('abort-controller').AbortController let PassThrough let Readable @@ -74,7 +83,7 @@ async function* fromReadable(val) { } yield* Readable.prototype[SymbolAsyncIterator].call(val) } -async function pump(iterable, writable, finish, { end }) { +async function pumpToNode(iterable, writable, finish, { end }) { let error let onresolve = null const resume = (err) => { @@ -130,6 +139,31 @@ async function pump(iterable, writable, finish, { end }) { writable.off('drain', resume) } } +async function pumpToWeb(readable, writable, finish, { end }) { + if (isTransformStream(writable)) { + writable = writable.writable + } + // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure + const writer = writable.getWriter() + try { + for await (const chunk of readable) { + await writer.ready + writer.write(chunk).catch(() => {}) + } + await writer.ready + if (end) { + await writer.close() + } + finish() + } catch (err) { + try { + await writer.abort(err) + finish(err) + } catch (err) { + finish(err) + } + } +} function pipeline(...streams) { return pipelineImpl(streams, once(popCallback(streams))) } @@ -215,13 +249,18 @@ function pipelineImpl(streams, callback, opts) { if (!isIterable(ret)) { throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret) } - } else if (isIterable(stream) || isReadableNodeStream(stream)) { + } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) { ret = stream } else { ret = Duplex.from(stream) } } else if (typeof stream === 'function') { - ret = 
makeAsyncIterable(ret) + if (isTransformStream(ret)) { + var _ret + ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable) + } else { + ret = makeAsyncIterable(ret) + } ret = stream(ret, { signal }) @@ -230,7 +269,7 @@ function pipelineImpl(streams, callback, opts) { throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret) } } else { - var _ret + var _ret2 if (!PassThrough) { PassThrough = require('./passthrough') } @@ -246,7 +285,7 @@ function pipelineImpl(streams, callback, opts) { // Handle Promises/A+ spec, `then` could be a getter that throws on // second use. - const then = (_ret = ret) === null || _ret === undefined ? undefined : _ret.then + const then = (_ret2 = ret) === null || _ret2 === undefined ? undefined : _ret2.then if (typeof then === 'function') { finishCount++ then.call( @@ -268,7 +307,13 @@ function pipelineImpl(streams, callback, opts) { ) } else if (isIterable(ret, true)) { finishCount++ - pump(ret, pt, finish, { + pumpToNode(ret, pt, finish, { + end + }) + } else if (isReadableStream(ret) || isTransformStream(ret)) { + const toRead = ret.readable || ret + finishCount++ + pumpToNode(toRead, pt, finish, { end }) } else { @@ -290,13 +335,47 @@ function pipelineImpl(streams, callback, opts) { if (isReadable(stream) && isLastStream) { lastStreamCleanup.push(cleanup) } + } else if (isTransformStream(ret) || isReadableStream(ret)) { + const toRead = ret.readable || ret + finishCount++ + pumpToNode(toRead, stream, finish, { + end + }) } else if (isIterable(ret)) { finishCount++ - pump(ret, stream, finish, { + pumpToNode(ret, stream, finish, { end }) } else { - throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], ret) + throw new ERR_INVALID_ARG_TYPE( + 'val', + ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], + ret + ) + } + ret = stream + } else if (isWebStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount++ + pumpToWeb(makeAsyncIterable(ret), stream, finish, { + end + }) + } else if (isReadableStream(ret) || isIterable(ret)) { + finishCount++ + pumpToWeb(ret, stream, finish, { + end + }) + } else if (isTransformStream(ret)) { + finishCount++ + pumpToWeb(ret.readable, stream, finish, { + end + }) + } else { + throw new ERR_INVALID_ARG_TYPE( + 'val', + ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], + ret + ) } ret = stream } else { @@ -320,16 +399,24 @@ function pipe(src, dst, finish, { end }) { } }) src.pipe(dst, { - end - }) + end: false + }) // If end is true we already will have a listener to end dst. + if (end) { // Compat. Before node v10.12.0 stdio used to throw an error so // pipe() did/does not end() stdio destinations. // Now they allow it but "secretly" don't close the underlying fd. - src.once('end', () => { + + function endFn() { ended = true dst.end() - }) + } + if (isReadableEnded(src)) { + // End the destination if the source has already ended. 
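The branches above allow `pipeline()` to pump from Node streams into WHATWG streams and back. A small sketch, assuming a Node 18+ runtime with a global `WritableStream`; the destination and data are illustrative:

```js
const { pipeline, Readable } = require('readable-stream')

const sink = []
const webWritable = new WritableStream({
  write (chunk) {
    sink.push(String(chunk))
  },
})

pipeline(Readable.from(['hello', 'world']), webWritable, (err) => {
  if (err) {
    console.error('pipeline failed', err)
  } else {
    console.log('pipeline done:', sink) // [ 'hello', 'world' ]
  }
})
```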
+ process.nextTick(endFn) + } else { + src.once('end', endFn) + } } else { finish() } diff --git a/node_modules/readable-stream/lib/internal/streams/utils.js b/node_modules/readable-stream/lib/internal/streams/utils.js index f87e9fe68e6a8..e589ad96c6924 100644 --- a/node_modules/readable-stream/lib/internal/streams/utils.js +++ b/node_modules/readable-stream/lib/internal/streams/utils.js @@ -1,10 +1,12 @@ 'use strict' -const { Symbol, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials') +const { Symbol, SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials') const kDestroyed = Symbol('kDestroyed') const kIsErrored = Symbol('kIsErrored') const kIsReadable = Symbol('kIsReadable') const kIsDisturbed = Symbol('kIsDisturbed') +const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise') +const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction') function isReadableNodeStream(obj, strict = false) { var _obj$_readableState return !!( @@ -56,6 +58,24 @@ function isNodeStream(obj) { (typeof obj.pipe === 'function' && typeof obj.on === 'function')) ) } +function isReadableStream(obj) { + return !!( + obj && + !isNodeStream(obj) && + typeof obj.pipeThrough === 'function' && + typeof obj.getReader === 'function' && + typeof obj.cancel === 'function' + ) +} +function isWritableStream(obj) { + return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function') +} +function isTransformStream(obj) { + return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object') +} +function isWebStream(obj) { + return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj) +} function isIterable(obj, isAsync) { if (obj == null) return false if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function' @@ -274,22 +294,28 @@ module.exports = { kIsErrored, isReadable, kIsReadable, + kIsClosedPromise, + kControllerErrorFunction, isClosed, isDestroyed, isDuplexNodeStream, isFinished, isIterable, isReadableNodeStream, + isReadableStream, isReadableEnded, isReadableFinished, isReadableErrored, isNodeStream, + isWebStream, isWritable, isWritableNodeStream, + isWritableStream, isWritableEnded, isWritableFinished, isWritableErrored, isServerRequest, isServerResponse, - willEmitClose + willEmitClose, + isTransformStream } diff --git a/node_modules/readable-stream/lib/internal/validators.js b/node_modules/readable-stream/lib/internal/validators.js index f9e6e555971a1..85b2e9cd593d9 100644 --- a/node_modules/readable-stream/lib/internal/validators.js +++ b/node_modules/readable-stream/lib/internal/validators.js @@ -1,3 +1,5 @@ +/* eslint jsdoc/require-jsdoc: "error" */ + 'use strict' const { @@ -199,6 +201,13 @@ const validateOneOf = hideStackFrames((value, name, oneOf) => { function validateBoolean(value, name) { if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value) } + +/** + * @param {any} options + * @param {string} key + * @param {boolean} defaultValue + * @returns {boolean} + */ function getOwnPropertyValueOrDefault(options, key, defaultValue) { return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? 
defaultValue : options[key] } @@ -228,6 +237,24 @@ const validateObject = hideStackFrames((value, name, options = null) => { } }) +/** + * @callback validateDictionary - We are using the Web IDL Standard definition + * of "dictionary" here, which means any value + * whose Type is either Undefined, Null, or + * Object (which includes functions). + * @param {*} value + * @param {string} name + * @see https://webidl.spec.whatwg.org/#es-dictionary + * @see https://tc39.es/ecma262/#table-typeof-operator-results + */ + +/** @type {validateDictionary} */ +const validateDictionary = hideStackFrames((value, name) => { + if (value != null && typeof value !== 'object' && typeof value !== 'function') { + throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value) + } +}) + /** * @callback validateArray * @param {*} value @@ -247,7 +274,36 @@ const validateArray = hideStackFrames((value, name, minLength = 0) => { } }) -// eslint-disable-next-line jsdoc/require-returns-check +/** + * @callback validateStringArray + * @param {*} value + * @param {string} name + * @returns {asserts value is string[]} + */ + +/** @type {validateStringArray} */ +function validateStringArray(value, name) { + validateArray(value, name) + for (let i = 0; i < value.length; i++) { + validateString(value[i], `${name}[${i}]`) + } +} + +/** + * @callback validateBooleanArray + * @param {*} value + * @param {string} name + * @returns {asserts value is boolean[]} + */ + +/** @type {validateBooleanArray} */ +function validateBooleanArray(value, name) { + validateArray(value, name) + for (let i = 0; i < value.length; i++) { + validateBoolean(value[i], `${name}[${i}]`) + } +} + /** * @param {*} signal * @param {string} [name='signal'] @@ -370,13 +426,71 @@ function validateUnion(value, name, union) { throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value) } } + +/* + The rules for the Link header field are described here: + https://www.rfc-editor.org/rfc/rfc8288.html#section-3 + + This regex validates any string surrounded by angle brackets + (not necessarily a valid URI reference) followed by zero or more + link-params separated by semicolons. 
+*/ +const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/ + +/** + * @param {any} value + * @param {string} name + */ +function validateLinkHeaderFormat(value, name) { + if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) { + throw new ERR_INVALID_ARG_VALUE( + name, + value, + 'must be an array or string of format "; rel=preload; as=style"' + ) + } +} + +/** + * @param {any} hints + * @return {string} + */ +function validateLinkHeaderValue(hints) { + if (typeof hints === 'string') { + validateLinkHeaderFormat(hints, 'hints') + return hints + } else if (ArrayIsArray(hints)) { + const hintsLength = hints.length + let result = '' + if (hintsLength === 0) { + return result + } + for (let i = 0; i < hintsLength; i++) { + const link = hints[i] + validateLinkHeaderFormat(link, 'hints') + result += link + if (i !== hintsLength - 1) { + result += ', ' + } + } + return result + } + throw new ERR_INVALID_ARG_VALUE( + 'hints', + hints, + 'must be an array or string of format "; rel=preload; as=style"' + ) +} module.exports = { isInt32, isUint32, parseFileMode, validateArray, + validateStringArray, + validateBooleanArray, validateBoolean, validateBuffer, + validateDictionary, validateEncoding, validateFunction, validateInt32, @@ -391,5 +505,6 @@ module.exports = { validateUint32, validateUndefined, validateUnion, - validateAbortSignal + validateAbortSignal, + validateLinkHeaderValue } diff --git a/node_modules/readable-stream/lib/ours/primordials.js b/node_modules/readable-stream/lib/ours/primordials.js index 6a98b01681caf..9464cc7fea6a1 100644 --- a/node_modules/readable-stream/lib/ours/primordials.js +++ b/node_modules/readable-stream/lib/ours/primordials.js @@ -90,6 +90,7 @@ module.exports = { return self.trim() }, Symbol, + SymbolFor: Symbol.for, SymbolAsyncIterator: Symbol.asyncIterator, SymbolHasInstance: Symbol.hasInstance, SymbolIterator: Symbol.iterator, diff --git a/node_modules/readable-stream/lib/stream/promises.js b/node_modules/readable-stream/lib/stream/promises.js index d44dd8ad0e0f3..b85c51f47f1ce 100644 --- a/node_modules/readable-stream/lib/stream/promises.js +++ b/node_modules/readable-stream/lib/stream/promises.js @@ -1,15 +1,22 @@ 'use strict' const { ArrayPrototypePop, Promise } = require('../ours/primordials') -const { isIterable, isNodeStream } = require('../internal/streams/utils') +const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils') const { pipelineImpl: pl } = require('../internal/streams/pipeline') const { finished } = require('../internal/streams/end-of-stream') +require('stream') function pipeline(...streams) { return new Promise((resolve, reject) => { let signal let end const lastArg = streams[streams.length - 1] - if (lastArg && typeof lastArg === 'object' && !isNodeStream(lastArg) && !isIterable(lastArg)) { + if ( + lastArg && + typeof lastArg === 'object' && + !isNodeStream(lastArg) && + !isIterable(lastArg) && + !isWebStream(lastArg) + ) { const options = ArrayPrototypePop(streams) signal = options.signal end = options.end diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json index 7df83d9eb990a..c4f6504cc7cc6 100644 --- a/node_modules/readable-stream/package.json +++ b/node_modules/readable-stream/package.json @@ -1,6 +1,6 @@ { "name": "readable-stream", - "version": "4.3.0", + "version": "4.4.0", "description": "Node.js Streams, a user-land copy of the stream library from Node.js", "homepage": 
"https://github.com/nodejs/readable-stream", "license": "MIT", diff --git a/package-lock.json b/package-lock.json index 6086216ff2f11..2fa7b733d2736 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10777,9 +10777,9 @@ } }, "node_modules/readable-stream": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.3.0.tgz", - "integrity": "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.0.tgz", + "integrity": "sha512-kDMOq0qLtxV9f/SQv522h8cxZBqNZXuXNyjyezmfAAuribMyVXziljpQ/uQhfE1XLg2/TLTW2DsnoE4VAi/krg==", "inBundle": true, "dependencies": { "abort-controller": "^3.0.0", From 4e2f586bd02ae5bdb847ad6100663329dbff5783 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 10:01:27 -0700 Subject: [PATCH 10/11] deps: tuf-js@1.1.6 --- DEPENDENCIES.md | 1 + node_modules/tuf-js/dist/fetcher.js | 3 +++ node_modules/tuf-js/dist/updater.js | 8 ++++++++ node_modules/tuf-js/package.json | 10 ++++++---- package-lock.json | 7 ++++--- 5 files changed, 22 insertions(+), 7 deletions(-) diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index a7df0ec604f95..192c9cdf7903c 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -789,6 +789,7 @@ graph LR; tar-->minizlib; tar-->mkdirp; tar-->yallist; + tuf-js-->debug; tuf-js-->make-fetch-happen; tuf-js-->tufjs-models["@tufjs/models"]; tufjs-models-->minimatch; diff --git a/node_modules/tuf-js/dist/fetcher.js b/node_modules/tuf-js/dist/fetcher.js index 7a7405ac53e72..d3dcf53eeb869 100644 --- a/node_modules/tuf-js/dist/fetcher.js +++ b/node_modules/tuf-js/dist/fetcher.js @@ -4,11 +4,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.DefaultFetcher = exports.BaseFetcher = void 0; +const debug_1 = __importDefault(require("debug")); const fs_1 = __importDefault(require("fs")); const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); const util_1 = __importDefault(require("util")); const error_1 = require("./error"); const tmpfile_1 = require("./utils/tmpfile"); +const log = (0, debug_1.default)('tuf:fetch'); class BaseFetcher { // Download file from given URL. The file is downloaded to a temporary // location and then passed to the given handler. The handler is responsible @@ -58,6 +60,7 @@ class DefaultFetcher extends BaseFetcher { this.retries = options.retries; } async fetch(url) { + log('GET %s', url); const response = await (0, make_fetch_happen_1.default)(url, { timeout: this.timeout, retry: this.retries, diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js index 68243e554facb..71fa4981e3122 100644 --- a/node_modules/tuf-js/dist/updater.js +++ b/node_modules/tuf-js/dist/updater.js @@ -22,9 +22,13 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.Updater = void 0; const models_1 = require("@tufjs/models"); +const debug_1 = __importDefault(require("debug")); const fs = __importStar(require("fs")); const path = __importStar(require("path")); const config_1 = require("./config"); @@ -32,6 +36,7 @@ const error_1 = require("./error"); const fetcher_1 = require("./fetcher"); const store_1 = require("./store"); const url = __importStar(require("./utils/url")); +const log = (0, debug_1.default)('tuf:cache'); class Updater { constructor(options) { const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; @@ -86,6 +91,7 @@ class Updater { // Verify hashes and length of downloaded file await targetInfo.verify(fs.createReadStream(fileName)); // Copy file to target path + log('WRITE %s', targetPath); fs.copyFileSync(fileName, targetPath); }); return targetPath; @@ -107,6 +113,7 @@ class Updater { } loadLocalMetadata(fileName) { const filePath = path.join(this.dir, `${fileName}.json`); + log('READ %s', filePath); return fs.readFileSync(filePath); } // Sequentially load and persist on local disk every newer root metadata @@ -300,6 +307,7 @@ class Updater { async persistMetadata(metaDataName, bytesData) { try { const filePath = path.join(this.dir, `${metaDataName}.json`); + log('WRITE %s', filePath); fs.writeFileSync(filePath, bytesData.toString('utf8')); } catch (error) { diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json index ab84004c49403..c1134af2b2ff3 100644 --- a/node_modules/tuf-js/package.json +++ b/node_modules/tuf-js/package.json @@ -1,6 +1,6 @@ { "name": "tuf-js", - "version": "1.1.5", + "version": "1.1.6", "description": "JavaScript implementation of The Update Framework (TUF)", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -29,14 +29,16 @@ "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme", "devDependencies": { "@tufjs/repo-mock": "1.3.1", + "@types/debug": "^4.1.7", "@types/make-fetch-happen": "^10.0.1", - "@types/node": "^18.16.3", + "@types/node": "^20.1.1", "nock": "^13.3.1", "typescript": "^5.0.4" }, "dependencies": { - "make-fetch-happen": "^11.1.0", - "@tufjs/models": "1.0.4" + "@tufjs/models": "1.0.4", + "debug": "^4.3.4", + "make-fetch-happen": "^11.1.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/package-lock.json b/package-lock.json index 2fa7b733d2736..903dd857b7a57 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14518,12 +14518,13 @@ } }, "node_modules/tuf-js": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.5.tgz", - "integrity": "sha512-inqodgxdsmuxrtQVbu6tPNgRKWD1Boy3VB6GO7KczJZpAHiTukwhSzXUSzvDcw5pE2Jo8ua+e1ykpHv7VdPVlQ==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.6.tgz", + "integrity": "sha512-CXwFVIsXGbVY4vFiWF7TJKWmlKJAT8TWkH4RmiohJRcDJInix++F0dznDmoVbtJNzZ8yLprKUG4YrDIhv3nBMg==", "inBundle": true, "dependencies": { "@tufjs/models": "1.0.4", + "debug": "^4.3.4", "make-fetch-happen": "^11.1.0" }, "engines": { From ff2cece94a1f5aaa595923e9347ec47886e70219 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 16 May 2023 11:41:28 -0700 Subject: [PATCH 11/11] deps: cacache@17.1.2 --- node_modules/cacache/lib/content/write.js | 32 ++++++++++++++++------- node_modules/cacache/package.json | 6 ++--- package-lock.json | 8 +++--- package.json | 2 +- 4 files changed, 31 insertions(+), 17 deletions(-) 
diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js index b6f5c5623b58b..7146146581287 100644 --- a/node_modules/cacache/lib/content/write.js +++ b/node_modules/cacache/lib/content/write.js @@ -15,6 +15,9 @@ const fsm = require('fs-minipass') module.exports = write +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + async function write (cache, data, opts = {}) { const { algorithms, size, integrity } = opts @@ -159,16 +162,27 @@ async function makeTmp (cache, opts) { async function moveToDestination (tmp, cache, sri, opts) { const destination = contentPath(cache, sri) const destDir = path.dirname(destination) - - await fs.mkdir(destDir, { recursive: true }) - try { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - } catch (err) { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } + if (moveOperations.has(destination)) { + return moveOperations.get(destination) } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) } function sizeError (expected, found) { diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index b8ee783388d84..db17e3a41bc5e 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.0", + "version": "17.1.2", "cache-version": { "content": "2", "index": "5" @@ -60,7 +60,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.0" }, "engines": { @@ -69,7 +69,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.14.1", + "version": "4.15.1", "publish": "true" }, "author": "GitHub Inc.", diff --git a/package-lock.json b/package-lock.json index 903dd857b7a57..ad07549f25920 100644 --- a/package-lock.json +++ b/package-lock.json @@ -91,7 +91,7 @@ "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^17.1.0", + "cacache": "^17.1.2", "chalk": "^4.1.2", "ci-info": "^3.8.0", "cli-columns": "^4.0.0", @@ -3288,9 +3288,9 @@ } }, "node_modules/cacache": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.0.tgz", - "integrity": "sha512-hXpFU+Z3AfVmNuiLve1qxWHMq0RSIt5gjCKAHi/M6DktwFwDdAXAtunl1i4WSKaaVcU9IsRvXFg42jTHigcC6Q==", + "version": "17.1.2", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.2.tgz", + "integrity": "sha512-VcRDUtZd9r7yfGDpdm3dBDBSQbLd19IqWs9q1tuB9g6kmxYLwIjfLngRKMCfDHxReuf0SBclRuYn66Xds7jzUQ==", "inBundle": true, "dependencies": { "@npmcli/fs": "^3.1.0", diff --git a/package.json b/package.json index 5faef77f95891..123ecc8fd54c0 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^17.1.0", + "cacache": "^17.1.2", "chalk": "^4.1.2", "ci-info": "^3.8.0", "cli-columns": "^4.0.0",
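Note on the cacache 17.1.2 hunk above: write.js now keeps a Map of in-flight move operations keyed by destination path, so concurrent writes of the same content share one promise instead of racing the same rename, and the entry is dropped once the move settles. The following is a minimal standalone sketch of that pattern only, not cacache's actual implementation; the names are hypothetical and fs.promises.rename stands in for the moveFile helper used in the patch.

// dedupe-move.js -- illustrative sketch of the in-flight dedupe pattern
const { promises: fs } = require('fs')
const path = require('path')

// destination path -> promise for the move currently in progress
const inFlight = new Map()

function moveOnce (tmpFile, destination) {
  if (inFlight.has(destination)) {
    // another caller is already moving content to this destination;
    // reuse its promise instead of starting a second move
    return inFlight.get(destination)
  }
  const op = fs.mkdir(path.dirname(destination), { recursive: true })
    .then(() => fs.rename(tmpFile, destination))
    .finally(() => {
      // clear the cache entry whether the move succeeded or failed
      inFlight.delete(destination)
    })
  inFlight.set(destination, op)
  return op
}

module.exports = moveOnce

Callers that write the same content-addressed path concurrently would all await the same promise, which is the race the patched moveToDestination is guarding against.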