From bc7385800591f8840e14a381366aa00375605665 Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 29 Jan 2025 11:18:40 -0500 Subject: [PATCH 1/5] Add major and minor tagging too --- actions/push-images/__tests__/docker.test.ts | 66 +++++++++++++++++++- actions/push-images/src/docker.ts | 29 ++++++++- 2 files changed, 91 insertions(+), 4 deletions(-) diff --git a/actions/push-images/__tests__/docker.test.ts b/actions/push-images/__tests__/docker.test.ts index 3f8b6257..94bb82f0 100644 --- a/actions/push-images/__tests__/docker.test.ts +++ b/actions/push-images/__tests__/docker.test.ts @@ -157,65 +157,129 @@ describe('buildCommands', () => { 'docker load -i /images/fluentd-2.9.4-linux-amd64.tar', 'docker tag grafana/fluentd:2.9.4-amd64 grafana/fluentd:latest-amd64', 'docker push grafana/fluentd:latest-amd64', + 'docker tag grafana/fluentd:2.9.4-amd64 grafana/fluentd:v2-amd64', + 'docker push grafana/fluentd:v2-amd64', + 'docker tag grafana/fluentd:2.9.4-amd64 grafana/fluentd:v2.9-amd64', + 'docker push grafana/fluentd:v2.9-amd64', 'docker push -a grafana/fluentd', 'docker manifest create grafana/fluentd:2.9.4 grafana/fluentd:2.9.4-amd64', 'docker manifest push grafana/fluentd:2.9.4', 'docker manifest create grafana/fluentd:latest grafana/fluentd:latest-amd64', 'docker manifest push grafana/fluentd:latest', + 'docker manifest create grafana/fluentd:v2 grafana/fluentd:v2-amd64', + 'docker manifest push grafana/fluentd:v2', + 'docker manifest create grafana/fluentd:v2.9 grafana/fluentd:v2.9-amd64', + 'docker manifest push grafana/fluentd:v2.9', 'docker load -i /images/logcli-2.9.4-linux-amd64.tar', 'docker tag grafana/logcli:2.9.4-amd64 grafana/logcli:latest-amd64', 'docker push grafana/logcli:latest-amd64', + 'docker tag grafana/logcli:2.9.4-amd64 grafana/logcli:v2-amd64', + 'docker push grafana/logcli:v2-amd64', + 'docker tag grafana/logcli:2.9.4-amd64 grafana/logcli:v2.9-amd64', + 'docker push grafana/logcli:v2.9-amd64', 'docker load -i /images/logcli-2.9.4-linux-arm.tar', 'docker tag grafana/logcli:2.9.4-arm grafana/logcli:latest-arm', 'docker push grafana/logcli:latest-arm', + 'docker tag grafana/logcli:2.9.4-arm grafana/logcli:v2-arm', + 'docker push grafana/logcli:v2-arm', + 'docker tag grafana/logcli:2.9.4-arm grafana/logcli:v2.9-arm', + 'docker push grafana/logcli:v2.9-arm', 'docker load -i /images/logcli-2.9.4-linux-arm64.tar', 'docker tag grafana/logcli:2.9.4-arm64 grafana/logcli:latest-arm64', 'docker push grafana/logcli:latest-arm64', + 'docker tag grafana/logcli:2.9.4-arm64 grafana/logcli:v2-arm64', + 'docker push grafana/logcli:v2-arm64', + 'docker tag grafana/logcli:2.9.4-arm64 grafana/logcli:v2.9-arm64', + 'docker push grafana/logcli:v2.9-arm64', 'docker push -a grafana/logcli', 'docker manifest create grafana/logcli:2.9.4 grafana/logcli:2.9.4-amd64 grafana/logcli:2.9.4-arm grafana/logcli:2.9.4-arm64', 'docker manifest push grafana/logcli:2.9.4', 'docker manifest create grafana/logcli:latest grafana/logcli:latest-amd64 grafana/logcli:latest-arm grafana/logcli:latest-arm64', 'docker manifest push grafana/logcli:latest', + 'docker manifest create grafana/logcli:v2 grafana/logcli:v2-amd64 grafana/logcli:v2-arm grafana/logcli:v2-arm64', + 'docker manifest push grafana/logcli:v2', + 'docker manifest create grafana/logcli:v2.9 grafana/logcli:v2.9-amd64 grafana/logcli:v2.9-arm grafana/logcli:v2.9-arm64', + 'docker manifest push grafana/logcli:v2.9', 'docker load -i /images/logstash-2.9.4-linux-amd64.tar', 'docker tag grafana/logstash:2.9.4-amd64 grafana/logstash:latest-amd64', 
'docker push grafana/logstash:latest-amd64', + 'docker tag grafana/logstash:2.9.4-amd64 grafana/logstash:v2-amd64', + 'docker push grafana/logstash:v2-amd64', + 'docker tag grafana/logstash:2.9.4-amd64 grafana/logstash:v2.9-amd64', + 'docker push grafana/logstash:v2.9-amd64', 'docker push -a grafana/logstash', 'docker manifest create grafana/logstash:2.9.4 grafana/logstash:2.9.4-amd64', 'docker manifest push grafana/logstash:2.9.4', 'docker manifest create grafana/logstash:latest grafana/logstash:latest-amd64', 'docker manifest push grafana/logstash:latest', + 'docker manifest create grafana/logstash:v2 grafana/logstash:v2-amd64', + 'docker manifest push grafana/logstash:v2', + 'docker manifest create grafana/logstash:v2.9 grafana/logstash:v2.9-amd64', + 'docker manifest push grafana/logstash:v2.9', 'docker load -i /images/loki-2.9.4-linux-amd64.tar', 'docker tag grafana/loki:2.9.4-amd64 grafana/loki:latest-amd64', 'docker push grafana/loki:latest-amd64', + 'docker tag grafana/loki:2.9.4-amd64 grafana/loki:v2-amd64', + 'docker push grafana/loki:v2-amd64', + 'docker tag grafana/loki:2.9.4-amd64 grafana/loki:v2.9-amd64', + 'docker push grafana/loki:v2.9-amd64', 'docker load -i /images/loki-2.9.4-linux-arm.tar', 'docker tag grafana/loki:2.9.4-arm grafana/loki:latest-arm', 'docker push grafana/loki:latest-arm', + 'docker tag grafana/loki:2.9.4-arm grafana/loki:v2-arm', + 'docker push grafana/loki:v2-arm', + 'docker tag grafana/loki:2.9.4-arm grafana/loki:v2.9-arm', + 'docker push grafana/loki:v2.9-arm', 'docker load -i /images/loki-2.9.4-linux-arm64.tar', 'docker tag grafana/loki:2.9.4-arm64 grafana/loki:latest-arm64', 'docker push grafana/loki:latest-arm64', + 'docker tag grafana/loki:2.9.4-arm64 grafana/loki:v2-arm64', + 'docker push grafana/loki:v2-arm64', + 'docker tag grafana/loki:2.9.4-arm64 grafana/loki:v2.9-arm64', + 'docker push grafana/loki:v2.9-arm64', 'docker push -a grafana/loki', 'docker manifest create grafana/loki:2.9.4 grafana/loki:2.9.4-amd64 grafana/loki:2.9.4-arm grafana/loki:2.9.4-arm64', 'docker manifest push grafana/loki:2.9.4', 'docker manifest create grafana/loki:latest grafana/loki:latest-amd64 grafana/loki:latest-arm grafana/loki:latest-arm64', 'docker manifest push grafana/loki:latest', + 'docker manifest create grafana/loki:v2 grafana/loki:v2-amd64 grafana/loki:v2-arm grafana/loki:v2-arm64', + 'docker manifest push grafana/loki:v2', + 'docker manifest create grafana/loki:v2.9 grafana/loki:v2.9-amd64 grafana/loki:v2.9-arm grafana/loki:v2.9-arm64', + 'docker manifest push grafana/loki:v2.9', 'docker load -i /images/loki-canary-2.9.4-linux-amd64.tar', 'docker tag grafana/loki-canary:2.9.4-amd64 grafana/loki-canary:latest-amd64', 'docker push grafana/loki-canary:latest-amd64', + 'docker tag grafana/loki-canary:2.9.4-amd64 grafana/loki-canary:v2-amd64', + 'docker push grafana/loki-canary:v2-amd64', + 'docker tag grafana/loki-canary:2.9.4-amd64 grafana/loki-canary:v2.9-amd64', + 'docker push grafana/loki-canary:v2.9-amd64', 'docker load -i /images/loki-canary-2.9.4-linux-arm.tar', 'docker tag grafana/loki-canary:2.9.4-arm grafana/loki-canary:latest-arm', 'docker push grafana/loki-canary:latest-arm', + 'docker tag grafana/loki-canary:2.9.4-arm grafana/loki-canary:v2-arm', + 'docker push grafana/loki-canary:v2-arm', + 'docker tag grafana/loki-canary:2.9.4-arm grafana/loki-canary:v2.9-arm', + 'docker push grafana/loki-canary:v2.9-arm', 'docker load -i /images/loki-canary-2.9.4-linux-arm64.tar', 'docker tag grafana/loki-canary:2.9.4-arm64 grafana/loki-canary:latest-arm64', 
'docker push grafana/loki-canary:latest-arm64', + 'docker tag grafana/loki-canary:2.9.4-arm64 grafana/loki-canary:v2-arm64', + 'docker push grafana/loki-canary:v2-arm64', + 'docker tag grafana/loki-canary:2.9.4-arm64 grafana/loki-canary:v2.9-arm64', + 'docker push grafana/loki-canary:v2.9-arm64', 'docker push -a grafana/loki-canary', 'docker manifest create grafana/loki-canary:2.9.4 grafana/loki-canary:2.9.4-amd64 grafana/loki-canary:2.9.4-arm grafana/loki-canary:2.9.4-arm64', 'docker manifest push grafana/loki-canary:2.9.4', 'docker manifest create grafana/loki-canary:latest grafana/loki-canary:latest-amd64 grafana/loki-canary:latest-arm grafana/loki-canary:latest-arm64', - 'docker manifest push grafana/loki-canary:latest' + 'docker manifest push grafana/loki-canary:latest', + 'docker manifest create grafana/loki-canary:v2 grafana/loki-canary:v2-amd64 grafana/loki-canary:v2-arm grafana/loki-canary:v2-arm64', + 'docker manifest push grafana/loki-canary:v2', + 'docker manifest create grafana/loki-canary:v2.9 grafana/loki-canary:v2.9-amd64 grafana/loki-canary:v2.9-arm grafana/loki-canary:v2.9-arm64', + 'docker manifest push grafana/loki-canary:v2.9' ]) }) diff --git a/actions/push-images/src/docker.ts b/actions/push-images/src/docker.ts index 32c03a4f..2fd67689 100644 --- a/actions/push-images/src/docker.ts +++ b/actions/push-images/src/docker.ts @@ -59,7 +59,13 @@ export function buildDockerPluginCommands( if (isLatest) { commands.push( `docker plugin create ${repo}/${image}:latest-${shortPlatform} "${buildDir}"`, - `docker plugin push "${repo}/${image}:latest-${shortPlatform}"` + `docker plugin push "${repo}/${image}:latest-${shortPlatform}"`, + // Add major version tag (e.g., v2-amd64) + `docker plugin create ${repo}/${image}:v${version.major}-${shortPlatform} "${buildDir}"`, + `docker plugin push "${repo}/${image}:v${version.major}-${shortPlatform}"`, + // Add major.minor version tag (e.g., v2.9-amd64) + `docker plugin create ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform} "${buildDir}"`, + `docker plugin push "${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}"` ) } } @@ -115,7 +121,13 @@ export function buildCommands( if (isLatest) { commands.push( `docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:latest-${shortPlatform}`, - `docker push ${repo}/${image}:latest-${shortPlatform}` + `docker push ${repo}/${image}:latest-${shortPlatform}`, + // Add major version tag (e.g., v2-amd64) + `docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:v${version.major}-${shortPlatform}`, + `docker push ${repo}/${image}:v${version.major}-${shortPlatform}`, + // Add major.minor version tag (e.g., v2.9-amd64) + `docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}`, + `docker push ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}` ) } } @@ -131,9 +143,20 @@ export function buildCommands( const latestManifests = manifests.map(m => m.replace(`:${version.toString()}-`, ':latest-') ) + const majorManifests = manifests.map(m => + m.replace(`:${version.toString()}-`, `:v${version.major}-`) + ) + const majorMinorManifests = manifests.map(m => + m.replace(`:${version.toString()}-`, `:v${version.major}.${version.minor}-`) + ) + commands.push( `docker manifest create ${repo}/${image}:latest ${latestManifests.join(' ')}`, - `docker manifest push ${repo}/${image}:latest` + `docker manifest push 
${repo}/${image}:latest`, + `docker manifest create ${repo}/${image}:v${version.major} ${majorManifests.join(' ')}`, + `docker manifest push ${repo}/${image}:v${version.major}`, + `docker manifest create ${repo}/${image}:v${version.major}.${version.minor} ${majorMinorManifests.join(' ')}`, + `docker manifest push ${repo}/${image}:v${version.major}.${version.minor}` ) } } From 5bfe24e298189563a00689f3ce50ac9fbc3070d1 Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 29 Jan 2025 11:23:42 -0500 Subject: [PATCH 2/5] lint --- actions/push-images/__tests__/docker.test.ts | 4 ++-- actions/push-images/src/docker.ts | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/actions/push-images/__tests__/docker.test.ts b/actions/push-images/__tests__/docker.test.ts index 94bb82f0..626b5cd7 100644 --- a/actions/push-images/__tests__/docker.test.ts +++ b/actions/push-images/__tests__/docker.test.ts @@ -184,7 +184,7 @@ describe('buildCommands', () => { 'docker tag grafana/logcli:2.9.4-arm grafana/logcli:v2-arm', 'docker push grafana/logcli:v2-arm', 'docker tag grafana/logcli:2.9.4-arm grafana/logcli:v2.9-arm', - 'docker push grafana/logcli:v2.9-arm', + 'docker push grafana/logcli:v2.9-arm', 'docker load -i /images/logcli-2.9.4-linux-arm64.tar', 'docker tag grafana/logcli:2.9.4-arm64 grafana/logcli:latest-arm64', 'docker push grafana/logcli:latest-arm64', @@ -229,7 +229,7 @@ describe('buildCommands', () => { 'docker load -i /images/loki-2.9.4-linux-arm.tar', 'docker tag grafana/loki:2.9.4-arm grafana/loki:latest-arm', 'docker push grafana/loki:latest-arm', - 'docker tag grafana/loki:2.9.4-arm grafana/loki:v2-arm', + 'docker tag grafana/loki:2.9.4-arm grafana/loki:v2-arm', 'docker push grafana/loki:v2-arm', 'docker tag grafana/loki:2.9.4-arm grafana/loki:v2.9-arm', 'docker push grafana/loki:v2.9-arm', diff --git a/actions/push-images/src/docker.ts b/actions/push-images/src/docker.ts index 2fd67689..68628f24 100644 --- a/actions/push-images/src/docker.ts +++ b/actions/push-images/src/docker.ts @@ -147,7 +147,10 @@ export function buildCommands( m.replace(`:${version.toString()}-`, `:v${version.major}-`) ) const majorMinorManifests = manifests.map(m => - m.replace(`:${version.toString()}-`, `:v${version.major}.${version.minor}-`) + m.replace( + `:${version.toString()}-`, + `:v${version.major}.${version.minor}-` + ) ) commands.push( From 95d9f1e1fd37bcd2b8d53497fdbe41f7d551cb39 Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 29 Jan 2025 11:25:27 -0500 Subject: [PATCH 3/5] run actions --- actions/install-binary/dist/index.js | 318 ++++--------------------- actions/push-images/dist/index.js | 334 +++++---------------------- actions/should-release/dist/index.js | 318 ++++--------------------- 3 files changed, 139 insertions(+), 831 deletions(-) diff --git a/actions/install-binary/dist/index.js b/actions/install-binary/dist/index.js index 5921f147..b5c322bd 100644 --- a/actions/install-binary/dist/index.js +++ b/actions/install-binary/dist/index.js @@ -12308,132 +12308,6 @@ function onConnectTimeout (socket) { module.exports = buildConnector -/***/ }), - -/***/ 4462: -/***/ ((module) => { - -"use strict"; - - -/** @type {Record} */ -const headerNameLowerCasedRecord = {} - -// https://developer.mozilla.org/docs/Web/HTTP/Headers -const wellknownHeaderNames = [ - 'Accept', - 'Accept-Encoding', - 'Accept-Language', - 'Accept-Ranges', - 'Access-Control-Allow-Credentials', - 'Access-Control-Allow-Headers', - 'Access-Control-Allow-Methods', - 'Access-Control-Allow-Origin', - 
'Access-Control-Expose-Headers', - 'Access-Control-Max-Age', - 'Access-Control-Request-Headers', - 'Access-Control-Request-Method', - 'Age', - 'Allow', - 'Alt-Svc', - 'Alt-Used', - 'Authorization', - 'Cache-Control', - 'Clear-Site-Data', - 'Connection', - 'Content-Disposition', - 'Content-Encoding', - 'Content-Language', - 'Content-Length', - 'Content-Location', - 'Content-Range', - 'Content-Security-Policy', - 'Content-Security-Policy-Report-Only', - 'Content-Type', - 'Cookie', - 'Cross-Origin-Embedder-Policy', - 'Cross-Origin-Opener-Policy', - 'Cross-Origin-Resource-Policy', - 'Date', - 'Device-Memory', - 'Downlink', - 'ECT', - 'ETag', - 'Expect', - 'Expect-CT', - 'Expires', - 'Forwarded', - 'From', - 'Host', - 'If-Match', - 'If-Modified-Since', - 'If-None-Match', - 'If-Range', - 'If-Unmodified-Since', - 'Keep-Alive', - 'Last-Modified', - 'Link', - 'Location', - 'Max-Forwards', - 'Origin', - 'Permissions-Policy', - 'Pragma', - 'Proxy-Authenticate', - 'Proxy-Authorization', - 'RTT', - 'Range', - 'Referer', - 'Referrer-Policy', - 'Refresh', - 'Retry-After', - 'Sec-WebSocket-Accept', - 'Sec-WebSocket-Extensions', - 'Sec-WebSocket-Key', - 'Sec-WebSocket-Protocol', - 'Sec-WebSocket-Version', - 'Server', - 'Server-Timing', - 'Service-Worker-Allowed', - 'Service-Worker-Navigation-Preload', - 'Set-Cookie', - 'SourceMap', - 'Strict-Transport-Security', - 'Supports-Loading-Mode', - 'TE', - 'Timing-Allow-Origin', - 'Trailer', - 'Transfer-Encoding', - 'Upgrade', - 'Upgrade-Insecure-Requests', - 'User-Agent', - 'Vary', - 'Via', - 'WWW-Authenticate', - 'X-Content-Type-Options', - 'X-DNS-Prefetch-Control', - 'X-Frame-Options', - 'X-Permitted-Cross-Domain-Policies', - 'X-Powered-By', - 'X-Requested-With', - 'X-XSS-Protection' -] - -for (let i = 0; i < wellknownHeaderNames.length; ++i) { - const key = wellknownHeaderNames[i] - const lowerCasedKey = key.toLowerCase() - headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = - lowerCasedKey -} - -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. -Object.setPrototypeOf(headerNameLowerCasedRecord, null) - -module.exports = { - wellknownHeaderNames, - headerNameLowerCasedRecord -} - - /***/ }), /***/ 8045: @@ -13266,7 +13140,6 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) -const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -13476,15 +13349,6 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } -/** - * Retrieves a header name and returns its lowercase value. 
- * @param {string | Buffer} value Header name - * @returns {string} - */ -function headerNameToString (value) { - return headerNameLowerCasedRecord[value] || value.toLowerCase() -} - function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -13756,7 +13620,6 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, - headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -14036,14 +13899,6 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) const { File: UndiciFile } = __nccwpck_require__(8511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -let random -try { - const crypto = __nccwpck_require__(6005) - random = (max) => crypto.randomInt(0, max) -} catch { - random = (max) => Math.floor(Math.random(max)) -} - let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -14129,7 +13984,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. Jimmy Wärting */ @@ -17901,9 +17756,6 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') - // https://fetch.spec.whatwg.org/#authentication-entries - request.headersList.delete('proxy-authorization', true) - // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -20412,18 +20264,14 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) -let supportedHashes = [] - // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) - const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] - supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) -/* c8 ignore next 3 */ } catch { + } function responseURL (response) { @@ -20951,56 +20799,66 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If response is not eligible for integrity validation, return false. - // TODO - - // 4. If parsedMetadata is the empty set, return true. + // 3. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 5. Let metadata be the result of getting the strongest + // 4. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const strongest = getStrongestMetadata(parsedMetadata) - const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) + const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) + // get the strongest algorithm + const strongest = list[0].algo + // get all entries that use the strongest algorithm; ignore weaker + const metadata = list.filter((item) => item.algo === strongest) - // 6. For each item in metadata: + // 5. 
For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - const expectedValue = item.hash + let expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. + if (expectedValue.endsWith('==')) { + expectedValue = expectedValue.slice(0, -2) + } + // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue[actualValue.length - 1] === '=') { - if (actualValue[actualValue.length - 2] === '=') { - actualValue = actualValue.slice(0, -2) - } else { - actualValue = actualValue.slice(0, -1) - } + if (actualValue.endsWith('==')) { + actualValue = actualValue.slice(0, -2) } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (compareBase64Mixed(actualValue, expectedValue)) { + if (actualValue === expectedValue) { + return true + } + + let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') + + if (actualBase64URL.endsWith('==')) { + actualBase64URL = actualBase64URL.slice(0, -2) + } + + if (actualBase64URL === expectedValue) { return true } } - // 7. Return false. + // 6. Return false. return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i +const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -21014,6 +20872,8 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true + const supportedHashes = crypto.getHashes() + // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -21023,11 +20883,7 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if ( - parsedToken === null || - parsedToken.groups === undefined || - parsedToken.groups.algo === undefined - ) { + if (parsedToken === null || parsedToken.groups === undefined) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -21036,11 +20892,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo.toLowerCase() + const algorithm = parsedToken.groups.algo // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm)) { + if (supportedHashes.includes(algorithm.toLowerCase())) { result.push(parsedToken.groups) } } @@ -21053,82 +20909,6 @@ function parseMetadata (metadata) { return result } -/** - * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList - */ -function getStrongestMetadata (metadataList) { - // Let algorithm be the algo component of the first item in metadataList. 
- // Can be sha256 - let algorithm = metadataList[0].algo - // If the algorithm is sha512, then it is the strongest - // and we can return immediately - if (algorithm[3] === '5') { - return algorithm - } - - for (let i = 1; i < metadataList.length; ++i) { - const metadata = metadataList[i] - // If the algorithm is sha512, then it is the strongest - // and we can break the loop immediately - if (metadata.algo[3] === '5') { - algorithm = 'sha512' - break - // If the algorithm is sha384, then a potential sha256 or sha384 is ignored - } else if (algorithm[3] === '3') { - continue - // algorithm is sha256, check if algorithm is sha384 and if so, set it as - // the strongest - } else if (metadata.algo[3] === '3') { - algorithm = 'sha384' - } - } - return algorithm -} - -function filterMetadataListByAlgorithm (metadataList, algorithm) { - if (metadataList.length === 1) { - return metadataList - } - - let pos = 0 - for (let i = 0; i < metadataList.length; ++i) { - if (metadataList[i].algo === algorithm) { - metadataList[pos++] = metadataList[i] - } - } - - metadataList.length = pos - - return metadataList -} - -/** - * Compares two base64 strings, allowing for base64url - * in the second string. - * -* @param {string} actualValue always base64 - * @param {string} expectedValue base64 or base64url - * @returns {boolean} - */ -function compareBase64Mixed (actualValue, expectedValue) { - if (actualValue.length !== expectedValue.length) { - return false - } - for (let i = 0; i < actualValue.length; ++i) { - if (actualValue[i] !== expectedValue[i]) { - if ( - (actualValue[i] === '+' && expectedValue[i] === '-') || - (actualValue[i] === '/' && expectedValue[i] === '_') - ) { - continue - } - return false - } - } - - return true -} - // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -21544,8 +21324,7 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord, - parseMetadata + normalizeMethodRecord } @@ -23632,17 +23411,12 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - if (header.length === 4) { - return util.headerNameToString(header) === 'host' - } - if (removeContent && util.headerNameToString(header).startsWith('content-')) { - return true - } - if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { - const name = util.headerNameToString(header) - return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' - } - return false + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -28969,14 +28743,6 @@ module.exports = require("net"); /***/ }), -/***/ 6005: -/***/ ((module) => { - -"use strict"; -module.exports = require("node:crypto"); - -/***/ }), - /***/ 5673: /***/ ((module) => { diff --git a/actions/push-images/dist/index.js b/actions/push-images/dist/index.js index b13accbc..2969e5f3 100644 --- a/actions/push-images/dist/index.js +++ b/actions/push-images/dist/index.js @@ -8446,132 +8446,6 @@ function 
onConnectTimeout (socket) { module.exports = buildConnector -/***/ }), - -/***/ 4462: -/***/ ((module) => { - -"use strict"; - - -/** @type {Record} */ -const headerNameLowerCasedRecord = {} - -// https://developer.mozilla.org/docs/Web/HTTP/Headers -const wellknownHeaderNames = [ - 'Accept', - 'Accept-Encoding', - 'Accept-Language', - 'Accept-Ranges', - 'Access-Control-Allow-Credentials', - 'Access-Control-Allow-Headers', - 'Access-Control-Allow-Methods', - 'Access-Control-Allow-Origin', - 'Access-Control-Expose-Headers', - 'Access-Control-Max-Age', - 'Access-Control-Request-Headers', - 'Access-Control-Request-Method', - 'Age', - 'Allow', - 'Alt-Svc', - 'Alt-Used', - 'Authorization', - 'Cache-Control', - 'Clear-Site-Data', - 'Connection', - 'Content-Disposition', - 'Content-Encoding', - 'Content-Language', - 'Content-Length', - 'Content-Location', - 'Content-Range', - 'Content-Security-Policy', - 'Content-Security-Policy-Report-Only', - 'Content-Type', - 'Cookie', - 'Cross-Origin-Embedder-Policy', - 'Cross-Origin-Opener-Policy', - 'Cross-Origin-Resource-Policy', - 'Date', - 'Device-Memory', - 'Downlink', - 'ECT', - 'ETag', - 'Expect', - 'Expect-CT', - 'Expires', - 'Forwarded', - 'From', - 'Host', - 'If-Match', - 'If-Modified-Since', - 'If-None-Match', - 'If-Range', - 'If-Unmodified-Since', - 'Keep-Alive', - 'Last-Modified', - 'Link', - 'Location', - 'Max-Forwards', - 'Origin', - 'Permissions-Policy', - 'Pragma', - 'Proxy-Authenticate', - 'Proxy-Authorization', - 'RTT', - 'Range', - 'Referer', - 'Referrer-Policy', - 'Refresh', - 'Retry-After', - 'Sec-WebSocket-Accept', - 'Sec-WebSocket-Extensions', - 'Sec-WebSocket-Key', - 'Sec-WebSocket-Protocol', - 'Sec-WebSocket-Version', - 'Server', - 'Server-Timing', - 'Service-Worker-Allowed', - 'Service-Worker-Navigation-Preload', - 'Set-Cookie', - 'SourceMap', - 'Strict-Transport-Security', - 'Supports-Loading-Mode', - 'TE', - 'Timing-Allow-Origin', - 'Trailer', - 'Transfer-Encoding', - 'Upgrade', - 'Upgrade-Insecure-Requests', - 'User-Agent', - 'Vary', - 'Via', - 'WWW-Authenticate', - 'X-Content-Type-Options', - 'X-DNS-Prefetch-Control', - 'X-Frame-Options', - 'X-Permitted-Cross-Domain-Policies', - 'X-Powered-By', - 'X-Requested-With', - 'X-XSS-Protection' -] - -for (let i = 0; i < wellknownHeaderNames.length; ++i) { - const key = wellknownHeaderNames[i] - const lowerCasedKey = key.toLowerCase() - headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = - lowerCasedKey -} - -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. -Object.setPrototypeOf(headerNameLowerCasedRecord, null) - -module.exports = { - wellknownHeaderNames, - headerNameLowerCasedRecord -} - - /***/ }), /***/ 8045: @@ -9404,7 +9278,6 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) -const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -9614,15 +9487,6 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } -/** - * Retrieves a header name and returns its lowercase value. 
- * @param {string | Buffer} value Header name - * @returns {string} - */ -function headerNameToString (value) { - return headerNameLowerCasedRecord[value] || value.toLowerCase() -} - function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -9894,7 +9758,6 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, - headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -10174,14 +10037,6 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) const { File: UndiciFile } = __nccwpck_require__(8511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -let random -try { - const crypto = __nccwpck_require__(6005) - random = (max) => crypto.randomInt(0, max) -} catch { - random = (max) => Math.floor(Math.random(max)) -} - let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -10267,7 +10122,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. Jimmy Wärting */ @@ -14039,9 +13894,6 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') - // https://fetch.spec.whatwg.org/#authentication-entries - request.headersList.delete('proxy-authorization', true) - // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -16550,18 +16402,14 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) -let supportedHashes = [] - // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) - const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] - supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) -/* c8 ignore next 3 */ } catch { + } function responseURL (response) { @@ -17089,56 +16937,66 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If response is not eligible for integrity validation, return false. - // TODO - - // 4. If parsedMetadata is the empty set, return true. + // 3. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 5. Let metadata be the result of getting the strongest + // 4. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const strongest = getStrongestMetadata(parsedMetadata) - const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) + const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) + // get the strongest algorithm + const strongest = list[0].algo + // get all entries that use the strongest algorithm; ignore weaker + const metadata = list.filter((item) => item.algo === strongest) - // 6. For each item in metadata: + // 5. 
For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - const expectedValue = item.hash + let expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. + if (expectedValue.endsWith('==')) { + expectedValue = expectedValue.slice(0, -2) + } + // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue[actualValue.length - 1] === '=') { - if (actualValue[actualValue.length - 2] === '=') { - actualValue = actualValue.slice(0, -2) - } else { - actualValue = actualValue.slice(0, -1) - } + if (actualValue.endsWith('==')) { + actualValue = actualValue.slice(0, -2) } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (compareBase64Mixed(actualValue, expectedValue)) { + if (actualValue === expectedValue) { + return true + } + + let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') + + if (actualBase64URL.endsWith('==')) { + actualBase64URL = actualBase64URL.slice(0, -2) + } + + if (actualBase64URL === expectedValue) { return true } } - // 7. Return false. + // 6. Return false. return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i +const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -17152,6 +17010,8 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true + const supportedHashes = crypto.getHashes() + // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -17161,11 +17021,7 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if ( - parsedToken === null || - parsedToken.groups === undefined || - parsedToken.groups.algo === undefined - ) { + if (parsedToken === null || parsedToken.groups === undefined) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -17174,11 +17030,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo.toLowerCase() + const algorithm = parsedToken.groups.algo // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm)) { + if (supportedHashes.includes(algorithm.toLowerCase())) { result.push(parsedToken.groups) } } @@ -17191,82 +17047,6 @@ function parseMetadata (metadata) { return result } -/** - * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList - */ -function getStrongestMetadata (metadataList) { - // Let algorithm be the algo component of the first item in metadataList. 
- // Can be sha256 - let algorithm = metadataList[0].algo - // If the algorithm is sha512, then it is the strongest - // and we can return immediately - if (algorithm[3] === '5') { - return algorithm - } - - for (let i = 1; i < metadataList.length; ++i) { - const metadata = metadataList[i] - // If the algorithm is sha512, then it is the strongest - // and we can break the loop immediately - if (metadata.algo[3] === '5') { - algorithm = 'sha512' - break - // If the algorithm is sha384, then a potential sha256 or sha384 is ignored - } else if (algorithm[3] === '3') { - continue - // algorithm is sha256, check if algorithm is sha384 and if so, set it as - // the strongest - } else if (metadata.algo[3] === '3') { - algorithm = 'sha384' - } - } - return algorithm -} - -function filterMetadataListByAlgorithm (metadataList, algorithm) { - if (metadataList.length === 1) { - return metadataList - } - - let pos = 0 - for (let i = 0; i < metadataList.length; ++i) { - if (metadataList[i].algo === algorithm) { - metadataList[pos++] = metadataList[i] - } - } - - metadataList.length = pos - - return metadataList -} - -/** - * Compares two base64 strings, allowing for base64url - * in the second string. - * -* @param {string} actualValue always base64 - * @param {string} expectedValue base64 or base64url - * @returns {boolean} - */ -function compareBase64Mixed (actualValue, expectedValue) { - if (actualValue.length !== expectedValue.length) { - return false - } - for (let i = 0; i < actualValue.length; ++i) { - if (actualValue[i] !== expectedValue[i]) { - if ( - (actualValue[i] === '+' && expectedValue[i] === '-') || - (actualValue[i] === '/' && expectedValue[i] === '_') - ) { - continue - } - return false - } - } - - return true -} - // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -17682,8 +17462,7 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord, - parseMetadata + normalizeMethodRecord } @@ -19770,17 +19549,12 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - if (header.length === 4) { - return util.headerNameToString(header) === 'host' - } - if (removeContent && util.headerNameToString(header).startsWith('content-')) { - return true - } - if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { - const name = util.headerNameToString(header) - return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' - } - return false + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -24973,7 +24747,11 @@ function buildDockerPluginCommands(repo, buildDir, imageDir, files, isLatest) { commands.push(`docker plugin push "${repo}/${image}:${version.toString()}-${shortPlatform}"`); // Add latest tag for each platform if this is the latest version if (isLatest) { - commands.push(`docker plugin create ${repo}/${image}:latest-${shortPlatform} "${buildDir}"`, `docker plugin push "${repo}/${image}:latest-${shortPlatform}"`); + 
commands.push(`docker plugin create ${repo}/${image}:latest-${shortPlatform} "${buildDir}"`, `docker plugin push "${repo}/${image}:latest-${shortPlatform}"`, + // Add major version tag (e.g., v2-amd64) + `docker plugin create ${repo}/${image}:v${version.major}-${shortPlatform} "${buildDir}"`, `docker plugin push "${repo}/${image}:v${version.major}-${shortPlatform}"`, + // Add major.minor version tag (e.g., v2.9-amd64) + `docker plugin create ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform} "${buildDir}"`, `docker plugin push "${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}"`); } } } @@ -25016,14 +24794,20 @@ function buildCommands(repo, imageDir, files, isLatest) { manifests.push(`${repo}/${image}:${version.toString()}-${shortPlatform}`); // Add latest tag for each platform if this is the latest version if (isLatest) { - commands.push(`docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:latest-${shortPlatform}`, `docker push ${repo}/${image}:latest-${shortPlatform}`); + commands.push(`docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:latest-${shortPlatform}`, `docker push ${repo}/${image}:latest-${shortPlatform}`, + // Add major version tag (e.g., v2-amd64) + `docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:v${version.major}-${shortPlatform}`, `docker push ${repo}/${image}:v${version.major}-${shortPlatform}`, + // Add major.minor version tag (e.g., v2.9-amd64) + `docker tag ${repo}/${image}:${version.toString()}-${shortPlatform} ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}`, `docker push ${repo}/${image}:v${version.major}.${version.minor}-${shortPlatform}`); } } commands.push(`docker push -a ${repo}/${image}`, `docker manifest create ${repo}/${image}:${version.toString()} ${manifests.join(' ')}`, `docker manifest push ${repo}/${image}:${version.toString()}`); // Create and push latest manifest if this is the latest version if (isLatest) { const latestManifests = manifests.map(m => m.replace(`:${version.toString()}-`, ':latest-')); - commands.push(`docker manifest create ${repo}/${image}:latest ${latestManifests.join(' ')}`, `docker manifest push ${repo}/${image}:latest`); + const majorManifests = manifests.map(m => m.replace(`:${version.toString()}-`, `:v${version.major}-`)); + const majorMinorManifests = manifests.map(m => m.replace(`:${version.toString()}-`, `:v${version.major}.${version.minor}-`)); + commands.push(`docker manifest create ${repo}/${image}:latest ${latestManifests.join(' ')}`, `docker manifest push ${repo}/${image}:latest`, `docker manifest create ${repo}/${image}:v${version.major} ${majorManifests.join(' ')}`, `docker manifest push ${repo}/${image}:v${version.major}`, `docker manifest create ${repo}/${image}:v${version.major}.${version.minor} ${majorMinorManifests.join(' ')}`, `docker manifest push ${repo}/${image}:v${version.major}.${version.minor}`); } } return commands; @@ -25236,14 +25020,6 @@ module.exports = require("net"); /***/ }), -/***/ 6005: -/***/ ((module) => { - -"use strict"; -module.exports = require("node:crypto"); - -/***/ }), - /***/ 5673: /***/ ((module) => { diff --git a/actions/should-release/dist/index.js b/actions/should-release/dist/index.js index 3c7d9a49..86d8db45 100644 --- a/actions/should-release/dist/index.js +++ b/actions/should-release/dist/index.js @@ -83557,132 +83557,6 @@ function onConnectTimeout (socket) { module.exports = buildConnector -/***/ }), - -/***/ 14462: 
-/***/ ((module) => { - -"use strict"; - - -/** @type {Record} */ -const headerNameLowerCasedRecord = {} - -// https://developer.mozilla.org/docs/Web/HTTP/Headers -const wellknownHeaderNames = [ - 'Accept', - 'Accept-Encoding', - 'Accept-Language', - 'Accept-Ranges', - 'Access-Control-Allow-Credentials', - 'Access-Control-Allow-Headers', - 'Access-Control-Allow-Methods', - 'Access-Control-Allow-Origin', - 'Access-Control-Expose-Headers', - 'Access-Control-Max-Age', - 'Access-Control-Request-Headers', - 'Access-Control-Request-Method', - 'Age', - 'Allow', - 'Alt-Svc', - 'Alt-Used', - 'Authorization', - 'Cache-Control', - 'Clear-Site-Data', - 'Connection', - 'Content-Disposition', - 'Content-Encoding', - 'Content-Language', - 'Content-Length', - 'Content-Location', - 'Content-Range', - 'Content-Security-Policy', - 'Content-Security-Policy-Report-Only', - 'Content-Type', - 'Cookie', - 'Cross-Origin-Embedder-Policy', - 'Cross-Origin-Opener-Policy', - 'Cross-Origin-Resource-Policy', - 'Date', - 'Device-Memory', - 'Downlink', - 'ECT', - 'ETag', - 'Expect', - 'Expect-CT', - 'Expires', - 'Forwarded', - 'From', - 'Host', - 'If-Match', - 'If-Modified-Since', - 'If-None-Match', - 'If-Range', - 'If-Unmodified-Since', - 'Keep-Alive', - 'Last-Modified', - 'Link', - 'Location', - 'Max-Forwards', - 'Origin', - 'Permissions-Policy', - 'Pragma', - 'Proxy-Authenticate', - 'Proxy-Authorization', - 'RTT', - 'Range', - 'Referer', - 'Referrer-Policy', - 'Refresh', - 'Retry-After', - 'Sec-WebSocket-Accept', - 'Sec-WebSocket-Extensions', - 'Sec-WebSocket-Key', - 'Sec-WebSocket-Protocol', - 'Sec-WebSocket-Version', - 'Server', - 'Server-Timing', - 'Service-Worker-Allowed', - 'Service-Worker-Navigation-Preload', - 'Set-Cookie', - 'SourceMap', - 'Strict-Transport-Security', - 'Supports-Loading-Mode', - 'TE', - 'Timing-Allow-Origin', - 'Trailer', - 'Transfer-Encoding', - 'Upgrade', - 'Upgrade-Insecure-Requests', - 'User-Agent', - 'Vary', - 'Via', - 'WWW-Authenticate', - 'X-Content-Type-Options', - 'X-DNS-Prefetch-Control', - 'X-Frame-Options', - 'X-Permitted-Cross-Domain-Policies', - 'X-Powered-By', - 'X-Requested-With', - 'X-XSS-Protection' -] - -for (let i = 0; i < wellknownHeaderNames.length; ++i) { - const key = wellknownHeaderNames[i] - const lowerCasedKey = key.toLowerCase() - headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = - lowerCasedKey -} - -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. -Object.setPrototypeOf(headerNameLowerCasedRecord, null) - -module.exports = { - wellknownHeaderNames, - headerNameLowerCasedRecord -} - - /***/ }), /***/ 48045: @@ -84515,7 +84389,6 @@ const { InvalidArgumentError } = __nccwpck_require__(48045) const { Blob } = __nccwpck_require__(14300) const nodeUtil = __nccwpck_require__(73837) const { stringify } = __nccwpck_require__(63477) -const { headerNameLowerCasedRecord } = __nccwpck_require__(14462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -84725,15 +84598,6 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } -/** - * Retrieves a header name and returns its lowercase value. 
- * @param {string | Buffer} value Header name - * @returns {string} - */ -function headerNameToString (value) { - return headerNameLowerCasedRecord[value] || value.toLowerCase() -} - function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -85005,7 +84869,6 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, - headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -85285,14 +85148,6 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(29830) const { File: UndiciFile } = __nccwpck_require__(78511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -let random -try { - const crypto = __nccwpck_require__(6005) - random = (max) => crypto.randomInt(0, max) -} catch { - random = (max) => Math.floor(Math.random(max)) -} - let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -85378,7 +85233,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. Jimmy Wärting */ @@ -89150,9 +89005,6 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') - // https://fetch.spec.whatwg.org/#authentication-entries - request.headersList.delete('proxy-authorization', true) - // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -91661,18 +91513,14 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398 const assert = __nccwpck_require__(39491) const { isUint8Array } = __nccwpck_require__(29830) -let supportedHashes = [] - // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) - const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] - supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) -/* c8 ignore next 3 */ } catch { + } function responseURL (response) { @@ -92200,56 +92048,66 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If response is not eligible for integrity validation, return false. - // TODO - - // 4. If parsedMetadata is the empty set, return true. + // 3. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 5. Let metadata be the result of getting the strongest + // 4. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const strongest = getStrongestMetadata(parsedMetadata) - const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) + const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) + // get the strongest algorithm + const strongest = list[0].algo + // get all entries that use the strongest algorithm; ignore weaker + const metadata = list.filter((item) => item.algo === strongest) - // 6. For each item in metadata: + // 5. 
For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - const expectedValue = item.hash + let expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. + if (expectedValue.endsWith('==')) { + expectedValue = expectedValue.slice(0, -2) + } + // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue[actualValue.length - 1] === '=') { - if (actualValue[actualValue.length - 2] === '=') { - actualValue = actualValue.slice(0, -2) - } else { - actualValue = actualValue.slice(0, -1) - } + if (actualValue.endsWith('==')) { + actualValue = actualValue.slice(0, -2) } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (compareBase64Mixed(actualValue, expectedValue)) { + if (actualValue === expectedValue) { + return true + } + + let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') + + if (actualBase64URL.endsWith('==')) { + actualBase64URL = actualBase64URL.slice(0, -2) + } + + if (actualBase64URL === expectedValue) { return true } } - // 7. Return false. + // 6. Return false. return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i +const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -92263,6 +92121,8 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true + const supportedHashes = crypto.getHashes() + // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -92272,11 +92132,7 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if ( - parsedToken === null || - parsedToken.groups === undefined || - parsedToken.groups.algo === undefined - ) { + if (parsedToken === null || parsedToken.groups === undefined) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -92285,11 +92141,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo.toLowerCase() + const algorithm = parsedToken.groups.algo // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm)) { + if (supportedHashes.includes(algorithm.toLowerCase())) { result.push(parsedToken.groups) } } @@ -92302,82 +92158,6 @@ function parseMetadata (metadata) { return result } -/** - * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList - */ -function getStrongestMetadata (metadataList) { - // Let algorithm be the algo component of the first item in metadataList. 
- // Can be sha256 - let algorithm = metadataList[0].algo - // If the algorithm is sha512, then it is the strongest - // and we can return immediately - if (algorithm[3] === '5') { - return algorithm - } - - for (let i = 1; i < metadataList.length; ++i) { - const metadata = metadataList[i] - // If the algorithm is sha512, then it is the strongest - // and we can break the loop immediately - if (metadata.algo[3] === '5') { - algorithm = 'sha512' - break - // If the algorithm is sha384, then a potential sha256 or sha384 is ignored - } else if (algorithm[3] === '3') { - continue - // algorithm is sha256, check if algorithm is sha384 and if so, set it as - // the strongest - } else if (metadata.algo[3] === '3') { - algorithm = 'sha384' - } - } - return algorithm -} - -function filterMetadataListByAlgorithm (metadataList, algorithm) { - if (metadataList.length === 1) { - return metadataList - } - - let pos = 0 - for (let i = 0; i < metadataList.length; ++i) { - if (metadataList[i].algo === algorithm) { - metadataList[pos++] = metadataList[i] - } - } - - metadataList.length = pos - - return metadataList -} - -/** - * Compares two base64 strings, allowing for base64url - * in the second string. - * -* @param {string} actualValue always base64 - * @param {string} expectedValue base64 or base64url - * @returns {boolean} - */ -function compareBase64Mixed (actualValue, expectedValue) { - if (actualValue.length !== expectedValue.length) { - return false - } - for (let i = 0; i < actualValue.length; ++i) { - if (actualValue[i] !== expectedValue[i]) { - if ( - (actualValue[i] === '+' && expectedValue[i] === '-') || - (actualValue[i] === '/' && expectedValue[i] === '_') - ) { - continue - } - return false - } - } - - return true -} - // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -92793,8 +92573,7 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord, - parseMetadata + normalizeMethodRecord } @@ -94881,17 +94660,12 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - if (header.length === 4) { - return util.headerNameToString(header) === 'host' - } - if (removeContent && util.headerNameToString(header).startsWith('content-')) { - return true - } - if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { - const name = util.headerNameToString(header) - return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' - } - return false + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -107708,14 +107482,6 @@ module.exports = require("net"); /***/ }), -/***/ 6005: -/***/ ((module) => { - -"use strict"; -module.exports = require("node:crypto"); - -/***/ }), - /***/ 15673: /***/ ((module) => { From e4ceb6aec98ee09709551b522171aeb657dc8cc6 Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 29 Jan 2025 11:29:43 -0500 Subject: [PATCH 4/5] npm linting is awful --- actions/push-images/src/docker.ts | 1 - 1 file changed, 1 
deletion(-) diff --git a/actions/push-images/src/docker.ts b/actions/push-images/src/docker.ts index 68628f24..b7c2f942 100644 --- a/actions/push-images/src/docker.ts +++ b/actions/push-images/src/docker.ts @@ -152,7 +152,6 @@ export function buildCommands( `:v${version.major}.${version.minor}-` ) ) - commands.push( `docker manifest create ${repo}/${image}:latest ${latestManifests.join(' ')}`, `docker manifest push ${repo}/${image}:latest`, From 0e21f06dd9de22ff31cc463df774760a63530c1e Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 29 Jan 2025 11:32:50 -0500 Subject: [PATCH 5/5] lets run actions again because why not --- actions/install-binary/dist/index.js | 318 +++++++++++++++++++++++---- actions/push-images/dist/index.js | 318 +++++++++++++++++++++++---- actions/should-release/dist/index.js | 318 +++++++++++++++++++++++---- 3 files changed, 828 insertions(+), 126 deletions(-) diff --git a/actions/install-binary/dist/index.js b/actions/install-binary/dist/index.js index b5c322bd..5921f147 100644 --- a/actions/install-binary/dist/index.js +++ b/actions/install-binary/dist/index.js @@ -12308,6 +12308,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 4462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] 
= headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 8045: @@ -13140,6 +13266,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -13349,6 +13476,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -13620,6 +13756,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -13899,6 +14036,14 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) const { File: UndiciFile } = __nccwpck_require__(8511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +let random +try { + const crypto = __nccwpck_require__(6005) + random = (max) => crypto.randomInt(0, max) +} catch { + random = (max) => Math.floor(Math.random(max)) +} + let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -13984,7 +14129,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. Jimmy Wärting */ @@ -17756,6 +17901,9 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. 
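The headerNameToString helper added in these hunks looks header names up in a precomputed, prototype-less table before falling back to toLowerCase. A minimal sketch of the same idea, with a deliberately shortened name list rather than the bundled wellknownHeaderNames:

// Precompute lowercase forms for a few common header names; the prototype is
// dropped so lookups cannot hit Object.prototype properties.
const lowerCasedNames = Object.create(null)
for (const name of ['Accept', 'Authorization', 'Content-Type', 'Cookie', 'Host']) {
  lowerCasedNames[name] = lowerCasedNames[name.toLowerCase()] = name.toLowerCase()
}

function headerNameToString (value) {
  return lowerCasedNames[value] || String(value).toLowerCase()
}

// headerNameToString('Content-Type') === 'content-type'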
request.headersList.delete('cookie') request.headersList.delete('host') @@ -20264,14 +20412,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -20799,66 +20951,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 5. For each item in metadata: + // 6. For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - let expectedValue = item.hash + const expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. - if (expectedValue.endsWith('==')) { - expectedValue = expectedValue.slice(0, -2) - } - // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue.endsWith('==')) { - actualValue = actualValue.slice(0, -2) + if (actualValue[actualValue.length - 1] === '=') { + if (actualValue[actualValue.length - 2] === '=') { + actualValue = actualValue.slice(0, -2) + } else { + actualValue = actualValue.slice(0, -1) + } } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (actualValue === expectedValue) { - return true - } - - let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') - - if (actualBase64URL.endsWith('==')) { - actualBase64URL = actualBase64URL.slice(0, -2) - } - - if (actualBase64URL === expectedValue) { + if (compareBase64Mixed(actualValue, expectedValue)) { return true } } - // 6. Return false. + // 7. Return false. 
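The rewritten bytesMatch above swaps an ad hoc sort for getStrongestMetadata and filterMetadataListByAlgorithm, so only the strongest supplied digest family is checked. A compact sketch of that selection step, assuming entries shaped like { algo, hash } as in the parsed metadata; keepStrongest is a name invented here:

// Keep only the entries that use the strongest algorithm present.
const rank = { sha256: 1, sha384: 2, sha512: 3 }

function keepStrongest (metadataList) {
  let strongest = metadataList[0].algo
  for (const { algo } of metadataList) {
    if (rank[algo] > rank[strongest]) strongest = algo
  }
  return metadataList.filter((item) => item.algo === strongest)
}

// keepStrongest([{ algo: 'sha256', hash: 'aaa' }, { algo: 'sha512', hash: 'bbb' }])
// returns only the sha512 entry.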
return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -20872,8 +21014,6 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true - const supportedHashes = crypto.getHashes() - // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -20883,7 +21023,11 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if (parsedToken === null || parsedToken.groups === undefined) { + if ( + parsedToken === null || + parsedToken.groups === undefined || + parsedToken.groups.algo === undefined + ) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -20892,11 +21036,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo + const algorithm = parsedToken.groups.algo.toLowerCase() // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm.toLowerCase())) { + if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } @@ -20909,6 +21053,82 @@ function parseMetadata (metadata) { return result } +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. 
+ * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -21324,7 +21544,8 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord + normalizeMethodRecord, + parseMetadata } @@ -23411,12 +23632,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -28743,6 +28969,14 @@ module.exports = require("net"); /***/ }), +/***/ 6005: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:crypto"); + +/***/ }), + /***/ 5673: /***/ ((module) => { diff --git a/actions/push-images/dist/index.js b/actions/push-images/dist/index.js index 2969e5f3..0b335ed8 100644 --- a/actions/push-images/dist/index.js +++ b/actions/push-images/dist/index.js @@ -8446,6 +8446,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 4462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 
'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 8045: @@ -9278,6 +9404,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -9487,6 +9614,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -9758,6 +9894,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -10037,6 +10174,14 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) const { File: UndiciFile } = __nccwpck_require__(8511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +let random +try { + const crypto = __nccwpck_require__(6005) + random = (max) => crypto.randomInt(0, max) +} catch { + random = (max) => Math.floor(Math.random(max)) +} + let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -10122,7 +10267,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. 
Jimmy Wärting */ @@ -13894,6 +14039,9 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -16402,14 +16550,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -16937,66 +17089,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 5. For each item in metadata: + // 6. For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - let expectedValue = item.hash + const expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. - if (expectedValue.endsWith('==')) { - expectedValue = expectedValue.slice(0, -2) - } - // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue.endsWith('==')) { - actualValue = actualValue.slice(0, -2) + if (actualValue[actualValue.length - 1] === '=') { + if (actualValue[actualValue.length - 2] === '=') { + actualValue = actualValue.slice(0, -2) + } else { + actualValue = actualValue.slice(0, -1) + } } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (actualValue === expectedValue) { - return true - } - - let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') - - if (actualBase64URL.endsWith('==')) { - actualBase64URL = actualBase64URL.slice(0, -2) - } - - if (actualBase64URL === expectedValue) { + if (compareBase64Mixed(actualValue, expectedValue)) { return true } } - // 6. Return false. + // 7. Return false. 
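Earlier in this file's diff the multipart boundary switches from Math.random to crypto.randomInt where available (the bundled fallback still calls Math.random(max), which ignores its argument). A self-contained sketch of that pattern; the boundary prefix below is illustrative, not the one undici emits:

let random
try {
  // crypto.randomInt(min, max) yields an integer in [min, max); the range
  // must stay below 2**48, which 1e11 does.
  const crypto = require('node:crypto')
  random = (max) => crypto.randomInt(0, max)
} catch {
  // Non-cryptographic fallback for builds without crypto support.
  random = (max) => Math.floor(Math.random() * max)
}

const boundary = `----formdata-sketch-0${`${random(1e11)}`.padStart(11, '0')}`
// e.g. '----formdata-sketch-012345678901'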
return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -17010,8 +17152,6 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true - const supportedHashes = crypto.getHashes() - // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -17021,7 +17161,11 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if (parsedToken === null || parsedToken.groups === undefined) { + if ( + parsedToken === null || + parsedToken.groups === undefined || + parsedToken.groups.algo === undefined + ) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -17030,11 +17174,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo + const algorithm = parsedToken.groups.algo.toLowerCase() // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm.toLowerCase())) { + if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } @@ -17047,6 +17191,82 @@ function parseMetadata (metadata) { return result } +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. 
+ * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -17462,7 +17682,8 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord + normalizeMethodRecord, + parseMetadata } @@ -19549,12 +19770,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -25020,6 +25246,14 @@ module.exports = require("net"); /***/ }), +/***/ 6005: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:crypto"); + +/***/ }), + /***/ 5673: /***/ ((module) => { diff --git a/actions/should-release/dist/index.js b/actions/should-release/dist/index.js index 86d8db45..3c7d9a49 100644 --- a/actions/should-release/dist/index.js +++ b/actions/should-release/dist/index.js @@ -83557,6 +83557,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 14462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 
'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 48045: @@ -84389,6 +84515,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045) const { Blob } = __nccwpck_require__(14300) const nodeUtil = __nccwpck_require__(73837) const { stringify } = __nccwpck_require__(63477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(14462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -84598,6 +84725,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -84869,6 +85005,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -85148,6 +85285,14 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(29830) const { File: UndiciFile } = __nccwpck_require__(78511) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +let random +try { + const crypto = __nccwpck_require__(6005) + random = (max) => crypto.randomInt(0, max) +} catch { + random = (max) => Math.floor(Math.random(max)) +} + let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ @@ -85233,7 +85378,7 @@ function extractBody (object, keepalive = false) { // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. 
MIT License. Jimmy Wärting */ @@ -89005,6 +89150,9 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -91513,14 +91661,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398 const assert = __nccwpck_require__(39491) const { isUint8Array } = __nccwpck_require__(29830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -92048,66 +92200,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. - const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 5. For each item in metadata: + // 6. For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - let expectedValue = item.hash + const expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. - if (expectedValue.endsWith('==')) { - expectedValue = expectedValue.slice(0, -2) - } - // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue.endsWith('==')) { - actualValue = actualValue.slice(0, -2) + if (actualValue[actualValue.length - 1] === '=') { + if (actualValue[actualValue.length - 2] === '=') { + actualValue = actualValue.slice(0, -2) + } else { + actualValue = actualValue.slice(0, -1) + } } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (actualValue === expectedValue) { - return true - } - - let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') - - if (actualBase64URL.endsWith('==')) { - actualBase64URL = actualBase64URL.slice(0, -2) - } - - if (actualBase64URL === expectedValue) { + if (compareBase64Mixed(actualValue, expectedValue)) { return true } } - // 6. Return false. + // 7. Return false. 
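These hunks also harden the integrity path against Node builds without crypto support: the require is wrapped in try/catch and the usable SRI digests are computed once from crypto.getHashes(). A minimal sketch of that gate; canCheckIntegrity is a name invented here for illustration:

let crypto
let supportedHashes = []
try {
  crypto = require('node:crypto')
  // Of everything OpenSSL reports, keep only the digests SRI can use.
  const relevant = ['sha256', 'sha384', 'sha512']
  supportedHashes = crypto.getHashes().filter((hash) => relevant.includes(hash))
} catch {
  // crypto support was compiled out; integrity checking is skipped entirely.
}

function canCheckIntegrity () {
  return crypto !== undefined && supportedHashes.length > 0
}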
return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -92121,8 +92263,6 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true - const supportedHashes = crypto.getHashes() - // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -92132,7 +92272,11 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if (parsedToken === null || parsedToken.groups === undefined) { + if ( + parsedToken === null || + parsedToken.groups === undefined || + parsedToken.groups.algo === undefined + ) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -92141,11 +92285,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo + const algorithm = parsedToken.groups.algo.toLowerCase() // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm.toLowerCase())) { + if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } @@ -92158,6 +92302,82 @@ function parseMetadata (metadata) { return result } +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. 
+ * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -92573,7 +92793,8 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord + normalizeMethodRecord, + parseMetadata } @@ -94660,12 +94881,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -107482,6 +107708,14 @@ module.exports = require("net"); /***/ }), +/***/ 6005: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:crypto"); + +/***/ }), + /***/ 15673: /***/ ((module) => {
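The shouldRemoveHeader rewrite in these dist diffs decides which request headers are dropped when a redirect is followed: host is always recomputed, content-* headers go when the body is not forwarded, and authorization, proxy-authorization and cookie go when the redirect leaves the original origin (the length checks in the bundle are just a fast path before the string comparison). A rough standalone sketch of that decision, assuming the header name arrives as a plain string:

// Return true if the header should be stripped from the redirected request.
function shouldRemoveHeader (name, removeContent, unknownOrigin) {
  const lower = name.toLowerCase()
  if (lower === 'host') {
    return true
  }
  if (removeContent && lower.startsWith('content-')) {
    return true
  }
  if (unknownOrigin) {
    return lower === 'authorization' || lower === 'cookie' || lower === 'proxy-authorization'
  }
  return false
}

// shouldRemoveHeader('Proxy-Authorization', false, true) === true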