npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
conflicts intelligently.
diff --git a/deps/npm/lib/commands/publish.js b/deps/npm/lib/commands/publish.js
index 8d2aa9e0e47f68..7b3e930922ecab 100644
--- a/deps/npm/lib/commands/publish.js
+++ b/deps/npm/lib/commands/publish.js
@@ -89,7 +89,7 @@ class Publish extends BaseCommand {
// The purpose of re-reading the manifest is in case it changed,
// so that we send the latest and greatest thing to the registry
// note that publishConfig might have changed as well!
- manifest = await this.getManifest(spec, opts)
+ manifest = await this.getManifest(spec, opts, true)
// JSON already has the package contents
if (!json) {
@@ -196,11 +196,18 @@ class Publish extends BaseCommand {
// if it's a directory, read it from the file system
// otherwise, get the full metadata from whatever it is
// XXX can't pacote read the manifest from a directory?
- async getManifest (spec, opts) {
+ async getManifest (spec, opts, logWarnings = false) {
let manifest
if (spec.type === 'directory') {
+ const changes = []
+ const pkg = await pkgJson.fix(spec.fetchSpec, { changes })
+ if (changes.length && logWarnings) {
+ /* eslint-disable-next-line max-len */
+ log.warn('publish', 'npm auto-corrected some errors in your package.json when publishing. Please run "npm pkg fix" to address these errors.')
+ log.warn('publish', `errors corrected:\n${changes.join('\n')}`)
+ }
// Prepare is the special function for publishing, different than normalize
- const { content } = await pkgJson.prepare(spec.fetchSpec)
+ const { content } = await pkg.prepare()
manifest = content
} else {
manifest = await pacote.manifest(spec, {
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 18be3b12c6599e..af399edb102b6f 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@9.8.0 /path/to/npm
+npm@9.8.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 114d4defc34b8b..cbb25b2aa1a32d 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-9.8.0
+9.8.1
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index 712d01b47b3345..a9ec27bacb0035 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -39,7 +39,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"benchmark": "^2.1.4",
"minify-registry-metadata": "^3.0.0",
"nock": "^13.3.0",
@@ -91,7 +91,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index 420981b4659fc1..76d193ba23ec4c 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -32,7 +32,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-globals": "^1.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.3.4"
},
"dependencies": {
@@ -50,6 +50,6 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1"
+ "version": "4.18.0"
}
}
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/index.js b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
index 53558a3977e4d1..0cc41c685a39e7 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
@@ -42,9 +42,7 @@ class PackageJson {
'fixNameField',
'fixVersionField',
'fixRepositoryField',
- 'fixBinField',
'fixDependencies',
- 'fixScriptsField',
'devDependencies',
'scriptpath',
])
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
index 726b3f031115b9..204d4d8a8e7dd6 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
@@ -1,11 +1,89 @@
+const semver = require('semver')
const fs = require('fs/promises')
const { glob } = require('glob')
-const normalizePackageBin = require('npm-normalize-package-bin')
const legacyFixer = require('normalize-package-data/lib/fixer.js')
const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js')
const path = require('path')
const log = require('proc-log')
const git = require('@npmcli/git')
+const hostedGitInfo = require('hosted-git-info')
+
+// used to be npm-normalize-package-bin
+function normalizePackageBin (pkg, changes) {
+ if (pkg.bin) {
+ if (typeof pkg.bin === 'string' && pkg.name) {
+ changes?.push('"bin" was converted to an object')
+ pkg.bin = { [pkg.name]: pkg.bin }
+ } else if (Array.isArray(pkg.bin)) {
+ changes?.push('"bin" was converted to an object')
+ pkg.bin = pkg.bin.reduce((acc, k) => {
+ acc[path.basename(k)] = k
+ return acc
+ }, {})
+ }
+ if (typeof pkg.bin === 'object') {
+ for (const binKey in pkg.bin) {
+ if (typeof pkg.bin[binKey] !== 'string') {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+ const base = path.join('/', path.basename(binKey.replace(/\\|:/g, '/'))).slice(1)
+ if (!base) {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+
+ const binTarget = path.join('/', pkg.bin[binKey].replace(/\\/g, '/'))
+ .replace(/\\/g, '/').slice(1)
+
+ if (!binTarget) {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+
+ if (base !== binKey) {
+ delete pkg.bin[binKey]
+ changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
+ }
+ if (binTarget !== pkg.bin[binKey]) {
+ changes?.push(`"bin[${base}]" script name was cleaned`)
+ }
+ pkg.bin[base] = binTarget
+ }
+
+ if (Object.keys(pkg.bin).length === 0) {
+ changes?.push('empty "bin" was removed')
+ delete pkg.bin
+ }
+
+ return pkg
+ }
+ }
+ delete pkg.bin
+}
+
+function isCorrectlyEncodedName (spec) {
+ return !spec.match(/[/@\s+%:]/) &&
+ spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+ if (spec.charAt(0) !== '@') {
+ return false
+ }
+
+ const rest = spec.slice(1).split('/')
+ if (rest.length !== 2) {
+ return false
+ }
+
+ return rest[0] && rest[1] &&
+ rest[0] === encodeURIComponent(rest[0]) &&
+ rest[1] === encodeURIComponent(rest[1])
+}
// We don't want the `changes` array in here by default because this is a hot
// path for parsing packuments during install. So the calling method passes it
@@ -18,17 +96,49 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
const scripts = data.scripts || {}
const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
- legacyFixer.warn = function () {
- changes?.push(legacyMakeWarning.apply(null, arguments))
- }
-
// name and version are load bearing so we have to clean them up first
if (steps.includes('fixNameField') || steps.includes('normalizeData')) {
- legacyFixer.fixNameField(data, { strict, allowLegacyCase })
+ if (!data.name && !strict) {
+ changes?.push('Missing "name" field was set to an empty string')
+ data.name = ''
+ } else {
+ if (typeof data.name !== 'string') {
+ throw new Error('name field must be a string.')
+ }
+ if (!strict) {
+ const name = data.name.trim()
+ if (data.name !== name) {
+ changes?.push(`Whitespace was trimmed from "name"`)
+ data.name = name
+ }
+ }
+
+ if (data.name.startsWith('.') ||
+ !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+ (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+ data.name.toLowerCase() === 'node_modules' ||
+ data.name.toLowerCase() === 'favicon.ico') {
+ throw new Error('Invalid name: ' + JSON.stringify(data.name))
+ }
+ }
}
if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
- legacyFixer.fixVersionField(data, strict)
+ // allow "loose" semver 1.0 versions in non-strict mode
+ // enforce strict semver 2.0 compliance in strict mode
+ const loose = !strict
+ if (!data.version) {
+ data.version = ''
+ } else {
+ if (!semver.valid(data.version, loose)) {
+ throw new Error(`Invalid version: "${data.version}"`)
+ }
+ const version = semver.clean(data.version, loose)
+ if (version !== data.version) {
+ changes?.push(`"version" was cleaned and set to "${version}"`)
+ data.version = version
+ }
+ }
}
// remove attributes that start with "_"
if (steps.includes('_attributes')) {
@@ -49,6 +159,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
// fix bundledDependencies typo
+ // normalize bundleDependencies
if (steps.includes('bundledDependencies')) {
if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
data.bundleDependencies = data.bundledDependencies
@@ -70,7 +181,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
changes?.push(`"bundleDependencies" was changed from an object to an array`)
data.bundleDependencies = Object.keys(bd)
}
- } else {
+ } else if ('bundleDependencies' in data) {
changes?.push(`"bundleDependencies" was removed`)
delete data.bundleDependencies
}
@@ -84,11 +195,11 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
if (data.dependencies &&
data.optionalDependencies && typeof data.optionalDependencies === 'object') {
for (const name in data.optionalDependencies) {
- changes?.push(`optionalDependencies entry "${name}" was removed`)
+ changes?.push(`optionalDependencies."${name}" was removed`)
delete data.dependencies[name]
}
if (!Object.keys(data.dependencies).length) {
- changes?.push(`empty "optionalDependencies" was removed`)
+ changes?.push(`Empty "optionalDependencies" was removed`)
delete data.dependencies
}
}
@@ -121,20 +232,21 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
// strip "node_modules/.bin" from scripts entries
+ // remove invalid scripts entries (non-strings)
if (steps.includes('scripts') || steps.includes('scriptpath')) {
const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
if (typeof data.scripts === 'object') {
for (const name in data.scripts) {
if (typeof data.scripts[name] !== 'string') {
delete data.scripts[name]
- changes?.push(`invalid scripts entry "${name}" was removed`)
- } else if (steps.includes('scriptpath')) {
+ changes?.push(`Invalid scripts."${name}" was removed`)
+ } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
data.scripts[name] = data.scripts[name].replace(spre, '')
changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
}
}
} else {
- changes?.push(`removed invalid "scripts"`)
+ changes?.push(`Removed invalid "scripts"`)
delete data.scripts
}
}
@@ -154,7 +266,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
.map(line => line.replace(/^\s*#.*$/, '').trim())
.filter(line => line)
data.contributors = authors
- changes.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
+ changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
} catch {
// do nothing
}
@@ -201,7 +313,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
- normalizePackageBin(data)
+ normalizePackageBin(data, changes)
}
// expand "directories.bin"
@@ -216,7 +328,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
return acc
}, {})
// *sigh*
- normalizePackageBin(data)
+ normalizePackageBin(data, changes)
}
// populate "gitHead" attribute
@@ -320,22 +432,96 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
// Some steps are isolated so we can do a limited subset of these in `fix`
if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
- legacyFixer.fixRepositoryField(data)
- }
-
- if (steps.includes('fixBinField') || steps.includes('normalizeData')) {
- legacyFixer.fixBinField(data)
+ if (data.repositories) {
+ /* eslint-disable-next-line max-len */
+ changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
+ data.repository = data.repositories[0]
+ }
+ if (data.repository) {
+ if (typeof data.repository === 'string') {
+ changes?.push('"repository" was changed from a string to an object')
+ data.repository = {
+ type: 'git',
+ url: data.repository,
+ }
+ }
+ if (data.repository.url) {
+ const hosted = hostedGitInfo.fromUrl(data.repository.url)
+ let r
+ if (hosted) {
+ if (hosted.getDefaultRepresentation() === 'shortcut') {
+ r = hosted.https()
+ } else {
+ r = hosted.toString()
+ }
+ if (r !== data.repository.url) {
+ changes?.push(`"repository.url" was normalized to "${r}"`)
+ data.repository.url = r
+ }
+ }
+ }
+ }
}
if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
- legacyFixer.fixDependencies(data, strict)
- }
+ // peerDependencies?
+ // devDependencies is meaningless here, it's ignored on an installed package
+ for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
+ if (data[type]) {
+ let secondWarning = true
+ if (typeof data[type] === 'string') {
+ changes?.push(`"${type}" was converted from a string into an object`)
+ data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
+ secondWarning = false
+ }
+ if (Array.isArray(data[type])) {
+ if (secondWarning) {
+ changes?.push(`"${type}" was converted from an array into an object`)
+ }
+ const o = {}
+ for (const d of data[type]) {
+ if (typeof d === 'string') {
+ const dep = d.trim().split(/(:?[@\s><=])/)
+ const dn = dep.shift()
+ const dv = dep.join('').replace(/^@/, '').trim()
+ o[dn] = dv
+ }
+ }
+ data[type] = o
+ }
+ }
+ }
+ // normalize-package-data used to put optional dependencies BACK into
+ // dependencies here, we no longer do this
- if (steps.includes('fixScriptsField') || steps.includes('normalizeData')) {
- legacyFixer.fixScriptsField(data)
+ for (const deps of ['dependencies', 'devDependencies']) {
+ if (deps in data) {
+ if (!data[deps] || typeof data[deps] !== 'object') {
+ changes?.push(`Removed invalid "${deps}"`)
+ delete data[deps]
+ } else {
+ for (const d in data[deps]) {
+ const r = data[deps][d]
+ if (typeof r !== 'string') {
+ changes?.push(`Removed invalid "${deps}.${d}"`)
+ delete data[deps][d]
+ }
+ const hosted = hostedGitInfo.fromUrl(data[deps][d])?.toString()
+ if (hosted && hosted !== data[deps][d]) {
+ changes?.push(`Normalized git reference to "${deps}.${d}"`)
+ data[deps][d] = hosted.toString()
+ }
+ }
+ }
+ }
+ }
}
if (steps.includes('normalizeData')) {
+ legacyFixer.warn = function () {
+ changes?.push(legacyMakeWarning.apply(null, arguments))
+ }
+
const legacySteps = [
'fixDescriptionField',
'fixModulesField',
diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json
index 4b9584dcad3707..33215b638db6ee 100644
--- a/deps/npm/node_modules/@npmcli/package-json/package.json
+++ b/deps/npm/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
- "version": "4.0.0",
+ "version": "4.0.1",
"description": "Programmatic API to update package.json",
"main": "lib/index.js",
"files": [
@@ -25,7 +25,7 @@
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.15.1",
+ "@npmcli/template-oss": "4.17.0",
"read-package-json": "^6.0.4",
"read-package-json-fast": "^3.0.2",
"tap": "^16.0.1"
@@ -33,10 +33,11 @@
"dependencies": {
"@npmcli/git": "^4.1.0",
"glob": "^10.2.2",
+ "hosted-git-info": "^6.1.1",
"json-parse-even-better-errors": "^3.0.0",
"normalize-package-data": "^5.0.0",
- "npm-normalize-package-bin": "^3.0.1",
- "proc-log": "^3.0.0"
+ "proc-log": "^3.0.0",
+ "semver": "^7.5.3"
},
"repository": {
"type": "git",
@@ -47,7 +48,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.15.1",
+ "version": "4.17.0",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/bin-links/lib/link-gently.js b/deps/npm/node_modules/bin-links/lib/link-gently.js
index 89ca0f6bf6b995..d1e955ec99b029 100644
--- a/deps/npm/node_modules/bin-links/lib/link-gently.js
+++ b/deps/npm/node_modules/bin-links/lib/link-gently.js
@@ -28,7 +28,7 @@ const CLOBBER = Symbol('clobber - ours or in forceful mode')
const linkGently = async ({ path, to, from, absFrom, force }) => {
if (seen.has(to)) {
- return true
+ return false
}
seen.add(to)
diff --git a/deps/npm/node_modules/bin-links/package.json b/deps/npm/node_modules/bin-links/package.json
index 589245a9313911..e6abb0b589808b 100644
--- a/deps/npm/node_modules/bin-links/package.json
+++ b/deps/npm/node_modules/bin-links/package.json
@@ -1,6 +1,6 @@
{
"name": "bin-links",
- "version": "4.0.1",
+ "version": "4.0.2",
"description": "JavaScript package binary linker",
"main": "./lib/index.js",
"scripts": {
@@ -30,7 +30,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.5.1",
+ "@npmcli/template-oss": "4.15.1",
"require-inject": "^1.4.4",
"tap": "^16.0.1"
},
@@ -53,6 +53,7 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"windowsCI": false,
- "version": "4.5.1"
+ "version": "4.15.1",
+ "publish": true
}
}
diff --git a/deps/npm/node_modules/chalk/package.json b/deps/npm/node_modules/chalk/package.json
index ddcf7589e9797d..3c500105bcbf25 100644
--- a/deps/npm/node_modules/chalk/package.json
+++ b/deps/npm/node_modules/chalk/package.json
@@ -1,6 +1,6 @@
{
"name": "chalk",
- "version": "5.2.0",
+ "version": "5.3.0",
"description": "Terminal string styling done right",
"license": "MIT",
"repository": "chalk/chalk",
@@ -61,12 +61,14 @@
"xo": "^0.53.0",
"yoctodelay": "^2.0.0"
},
+ "sideEffects": false,
"xo": {
"rules": {
"unicorn/prefer-string-slice": "off",
"@typescript-eslint/consistent-type-imports": "off",
"@typescript-eslint/consistent-type-exports": "off",
- "@typescript-eslint/consistent-type-definitions": "off"
+ "@typescript-eslint/consistent-type-definitions": "off",
+ "unicorn/expiring-todo-comments": "off"
}
},
"c8": {
diff --git a/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
index a7cea61e9eb5fd..4ce0a2da8d2242 100644
--- a/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
+++ b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
@@ -3,6 +3,7 @@ import os from 'node:os';
import tty from 'node:tty';
// From: https://github.com/sindresorhus/has-flag/blob/main/index.js
+/// function hasFlag(flag, argv = globalThis.Deno?.args ?? process.argv) {
function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : process.argv) {
const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
const position = argv.indexOf(prefix + flag);
@@ -111,7 +112,7 @@ function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
}
if ('CI' in env) {
- if ('GITHUB_ACTIONS' in env) {
+ if ('GITHUB_ACTIONS' in env || 'GITEA_ACTIONS' in env) {
return 3;
}
diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json
index 42ea3c661c2911..713cf8c264c986 100644
--- a/deps/npm/node_modules/libnpmaccess/package.json
+++ b/deps/npm/node_modules/libnpmaccess/package.json
@@ -17,7 +17,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
},
@@ -41,7 +41,7 @@
],
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json
index 2ef51fb8e03b70..ce6eb3531b32ed 100644
--- a/deps/npm/node_modules/libnpmdiff/package.json
+++ b/deps/npm/node_modules/libnpmdiff/package.json
@@ -42,7 +42,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.3.4"
},
"dependencies": {
@@ -58,7 +58,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmexec/lib/index.js b/deps/npm/node_modules/libnpmexec/lib/index.js
index b7aa43588c0fd8..34bb20769bc2c7 100644
--- a/deps/npm/node_modules/libnpmexec/lib/index.js
+++ b/deps/npm/node_modules/libnpmexec/lib/index.js
@@ -245,9 +245,12 @@ const exec = async (opts) => {
if (add.length) {
if (!yes) {
+ const missingPackages = add.map(a => `${a.replace(/@$/, '')}`)
// set -n to always say no
if (yes === false) {
- throw new Error('canceled')
+ // Error message lists missing package(s) when process is canceled
+ /* eslint-disable-next-line max-len */
+ throw new Error(`npx canceled due to missing packages and no YES option: ${JSON.stringify(missingPackages)}`)
}
if (noTTY() || ciInfo.isCI) {
@@ -257,8 +260,7 @@ const exec = async (opts) => {
add.map((pkg) => pkg.replace(/@$/, '')).join(', ')
}`)
} else {
- const addList = add.map(a => ` ${a.replace(/@$/, '')}`)
- .join('\n') + '\n'
+ const addList = missingPackages.join('\n') + '\n'
const prompt = `Need to install the following packages:\n${
addList
}Ok to proceed? `
diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json
index 290d895f5ee60e..9b86b81a998ef7 100644
--- a/deps/npm/node_modules/libnpmexec/package.json
+++ b/deps/npm/node_modules/libnpmexec/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmexec",
- "version": "6.0.2",
+ "version": "6.0.3",
"files": [
"bin/",
"lib/"
@@ -51,7 +51,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"bin-links": "^4.0.1",
"chalk": "^5.2.0",
"just-extend": "^6.2.0",
@@ -73,7 +73,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json
index 8e0b6d083715e3..0c863c2f92203a 100644
--- a/deps/npm/node_modules/libnpmfund/package.json
+++ b/deps/npm/node_modules/libnpmfund/package.json
@@ -41,7 +41,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.3.4"
},
"dependencies": {
@@ -52,7 +52,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json
index bc439929e7c7a1..05b34dda75c416 100644
--- a/deps/npm/node_modules/libnpmhook/package.json
+++ b/deps/npm/node_modules/libnpmhook/package.json
@@ -35,7 +35,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
},
@@ -44,7 +44,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json
index 0ee6abd7455a60..675d03b5b2437a 100644
--- a/deps/npm/node_modules/libnpmorg/package.json
+++ b/deps/npm/node_modules/libnpmorg/package.json
@@ -28,7 +28,7 @@
],
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"minipass": "^5.0.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
@@ -49,7 +49,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json
index 12cf7aa8ee3b10..d8861c337c4d99 100644
--- a/deps/npm/node_modules/libnpmpack/package.json
+++ b/deps/npm/node_modules/libnpmpack/package.json
@@ -23,7 +23,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.3.0",
"spawk": "^1.7.1",
"tap": "^16.3.4"
@@ -46,7 +46,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index 6ea6a7181b0b71..7c7533a82c735f 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -26,7 +26,7 @@
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"lodash.clonedeep": "^4.5.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
@@ -53,7 +53,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json
index e7dd7aca9baf16..32cb1f21b64221 100644
--- a/deps/npm/node_modules/libnpmsearch/package.json
+++ b/deps/npm/node_modules/libnpmsearch/package.json
@@ -26,7 +26,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
},
@@ -45,7 +45,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json
index 5558224050eec7..33a77095fe8489 100644
--- a/deps/npm/node_modules/libnpmteam/package.json
+++ b/deps/npm/node_modules/libnpmteam/package.json
@@ -16,7 +16,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.3.0",
"tap": "^16.3.4"
},
@@ -39,7 +39,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json
index 7bae86d8afe228..469f9c2bc00d67 100644
--- a/deps/npm/node_modules/libnpmversion/package.json
+++ b/deps/npm/node_modules/libnpmversion/package.json
@@ -32,7 +32,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"require-inject": "^1.4.4",
"tap": "^16.3.4"
},
@@ -48,7 +48,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/minimatch/dist/cjs/ast.js b/deps/npm/node_modules/minimatch/dist/cjs/ast.js
index 191e7e871c51d9..0b0cc8f3c50b3d 100644
--- a/deps/npm/node_modules/minimatch/dist/cjs/ast.js
+++ b/deps/npm/node_modules/minimatch/dist/cjs/ast.js
@@ -10,7 +10,7 @@ const isExtglobType = (c) => types.has(c);
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!\\.\\.?(?:$|/))';
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
const startNoDot = '(?!\\.)';
// characters that indicate a start of pattern needs the "no dots" bit,
// because a dot *might* be matched. ( is not in the list, because in
@@ -407,7 +407,8 @@ class AST {
// - Since the start for a join is eg /(?!\.) and the start for a part
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
// or start or whatever) and prepend ^ or / at the Regexp construction.
- toRegExpSource() {
+ toRegExpSource(allowDot) {
+ const dot = allowDot ?? !!this.#options.dot;
if (this.#root === this)
this.#fillNegs();
if (!this.type) {
@@ -416,7 +417,7 @@ class AST {
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
- : p.toRegExpSource();
+ : p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
return re;
@@ -436,14 +437,14 @@ class AST {
// and prevent that.
const needNoTrav =
// dots are allowed, and the pattern starts with [ or .
- (this.#options.dot && aps.has(src.charAt(0))) ||
+ (dot && aps.has(src.charAt(0))) ||
// the pattern starts with \., and then [ or .
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
// the pattern starts with \.\., and then [ or .
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
// no need to prevent dots if it can't match a dot, or if a
// sub-pattern will be preventing it anyway.
- const needNoDot = !this.#options.dot && aps.has(src.charAt(0));
+ const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
}
}
@@ -463,23 +464,13 @@ class AST {
this.#uflag,
];
}
+ // We need to calculate the body *twice* if it's a repeat pattern
+ // at the start, once in nodot mode, then again in dot mode, so a
+ // pattern like *(?) can match 'x.y'
+ const repeated = this.type === '*' || this.type === '+';
// some kind of extglob
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
- const body = this.#parts
- .map(p => {
- // extglob ASTs should only contain parent ASTs
- /* c8 ignore start */
- if (typeof p === 'string') {
- throw new Error('string type in extglob ast??');
- }
- /* c8 ignore stop */
- // can ignore hasMagic, because extglobs are already always magic
- const [re, _, _hasMagic, uflag] = p.toRegExpSource();
- this.#uflag = this.#uflag || uflag;
- return re;
- })
- .filter(p => !(this.isStart() && this.isEnd()) || !!p)
- .join('|');
+ let body = this.#partsToRegExp(dot);
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
@@ -489,22 +480,37 @@ class AST {
this.#hasMagic = undefined;
return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
}
+ // XXX abstract out this map method
+ let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+ ? ''
+ : this.#partsToRegExp(true);
+ if (bodyDotAllowed === body) {
+ bodyDotAllowed = '';
+ }
+ if (bodyDotAllowed) {
+ body = `(?:${body})(?:${bodyDotAllowed})*?`;
+ }
// an empty !() is exactly equivalent to a starNoEmpty
let final = '';
if (this.type === '!' && this.#emptyExt) {
- final =
- (this.isStart() && !this.#options.dot ? startNoDot : '') + starNoEmpty;
+ final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
}
else {
const close = this.type === '!'
? // !() must match something,but !(x) can match ''
'))' +
- (this.isStart() && !this.#options.dot ? startNoDot : '') +
+ (this.isStart() && !dot && !allowDot ? startNoDot : '') +
star +
')'
: this.type === '@'
? ')'
- : `)${this.type}`;
+ : this.type === '?'
+ ? ')?'
+ : this.type === '+' && bodyDotAllowed
+ ? ')'
+ : this.type === '*' && bodyDotAllowed
+ ? `)?`
+ : `)${this.type}`;
final = start + body + close;
}
return [
@@ -514,6 +520,23 @@ class AST {
this.#uflag,
];
}
+ #partsToRegExp(dot) {
+ return this.#parts
+ .map(p => {
+ // extglob ASTs should only contain parent ASTs
+ /* c8 ignore start */
+ if (typeof p === 'string') {
+ throw new Error('string type in extglob ast??');
+ }
+ /* c8 ignore stop */
+ // can ignore hasMagic, because extglobs are already always magic
+ const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+ this.#uflag = this.#uflag || uflag;
+ return re;
+ })
+ .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+ .join('|');
+ }
static #parseGlob(glob, hasMagic, noEmpty = false) {
let escaping = false;
let re = '';
diff --git a/deps/npm/node_modules/minimatch/dist/mjs/ast.js b/deps/npm/node_modules/minimatch/dist/mjs/ast.js
index 9836fe7b1db023..7fb1f83e6182a0 100644
--- a/deps/npm/node_modules/minimatch/dist/mjs/ast.js
+++ b/deps/npm/node_modules/minimatch/dist/mjs/ast.js
@@ -7,7 +7,7 @@ const isExtglobType = (c) => types.has(c);
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!\\.\\.?(?:$|/))';
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
const startNoDot = '(?!\\.)';
// characters that indicate a start of pattern needs the "no dots" bit,
// because a dot *might* be matched. ( is not in the list, because in
@@ -404,7 +404,8 @@ export class AST {
// - Since the start for a join is eg /(?!\.) and the start for a part
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
// or start or whatever) and prepend ^ or / at the Regexp construction.
- toRegExpSource() {
+ toRegExpSource(allowDot) {
+ const dot = allowDot ?? !!this.#options.dot;
if (this.#root === this)
this.#fillNegs();
if (!this.type) {
@@ -413,7 +414,7 @@ export class AST {
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
- : p.toRegExpSource();
+ : p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
return re;
@@ -433,14 +434,14 @@ export class AST {
// and prevent that.
const needNoTrav =
// dots are allowed, and the pattern starts with [ or .
- (this.#options.dot && aps.has(src.charAt(0))) ||
+ (dot && aps.has(src.charAt(0))) ||
// the pattern starts with \., and then [ or .
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
// the pattern starts with \.\., and then [ or .
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
// no need to prevent dots if it can't match a dot, or if a
// sub-pattern will be preventing it anyway.
- const needNoDot = !this.#options.dot && aps.has(src.charAt(0));
+ const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
}
}
@@ -460,23 +461,13 @@ export class AST {
this.#uflag,
];
}
+ // We need to calculate the body *twice* if it's a repeat pattern
+ // at the start, once in nodot mode, then again in dot mode, so a
+ // pattern like *(?) can match 'x.y'
+ const repeated = this.type === '*' || this.type === '+';
// some kind of extglob
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
- const body = this.#parts
- .map(p => {
- // extglob ASTs should only contain parent ASTs
- /* c8 ignore start */
- if (typeof p === 'string') {
- throw new Error('string type in extglob ast??');
- }
- /* c8 ignore stop */
- // can ignore hasMagic, because extglobs are already always magic
- const [re, _, _hasMagic, uflag] = p.toRegExpSource();
- this.#uflag = this.#uflag || uflag;
- return re;
- })
- .filter(p => !(this.isStart() && this.isEnd()) || !!p)
- .join('|');
+ let body = this.#partsToRegExp(dot);
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
@@ -486,22 +477,37 @@ export class AST {
this.#hasMagic = undefined;
return [s, unescape(this.toString()), false, false];
}
+ // XXX abstract out this map method
+ let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+ ? ''
+ : this.#partsToRegExp(true);
+ if (bodyDotAllowed === body) {
+ bodyDotAllowed = '';
+ }
+ if (bodyDotAllowed) {
+ body = `(?:${body})(?:${bodyDotAllowed})*?`;
+ }
// an empty !() is exactly equivalent to a starNoEmpty
let final = '';
if (this.type === '!' && this.#emptyExt) {
- final =
- (this.isStart() && !this.#options.dot ? startNoDot : '') + starNoEmpty;
+ final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
}
else {
const close = this.type === '!'
? // !() must match something,but !(x) can match ''
'))' +
- (this.isStart() && !this.#options.dot ? startNoDot : '') +
+ (this.isStart() && !dot && !allowDot ? startNoDot : '') +
star +
')'
: this.type === '@'
? ')'
- : `)${this.type}`;
+ : this.type === '?'
+ ? ')?'
+ : this.type === '+' && bodyDotAllowed
+ ? ')'
+ : this.type === '*' && bodyDotAllowed
+ ? `)?`
+ : `)${this.type}`;
final = start + body + close;
}
return [
@@ -511,6 +517,23 @@ export class AST {
this.#uflag,
];
}
+ #partsToRegExp(dot) {
+ return this.#parts
+ .map(p => {
+ // extglob ASTs should only contain parent ASTs
+ /* c8 ignore start */
+ if (typeof p === 'string') {
+ throw new Error('string type in extglob ast??');
+ }
+ /* c8 ignore stop */
+ // can ignore hasMagic, because extglobs are already always magic
+ const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+ this.#uflag = this.#uflag || uflag;
+ return re;
+ })
+ .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+ .join('|');
+ }
static #parseGlob(glob, hasMagic, noEmpty = false) {
let escaping = false;
let re = '';
diff --git a/deps/npm/node_modules/minimatch/package.json b/deps/npm/node_modules/minimatch/package.json
index d5ee74e334d6a4..061c3b9f343306 100644
--- a/deps/npm/node_modules/minimatch/package.json
+++ b/deps/npm/node_modules/minimatch/package.json
@@ -2,7 +2,7 @@
"author": "Isaac Z. Schlueter (http://blog.izs.me)",
"name": "minimatch",
"description": "a glob matcher in javascript",
- "version": "9.0.1",
+ "version": "9.0.3",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
@@ -60,12 +60,12 @@
"devDependencies": {
"@types/brace-expansion": "^1.1.0",
"@types/node": "^18.15.11",
- "@types/tap": "^15.0.7",
+ "@types/tap": "^15.0.8",
"c8": "^7.12.0",
"eslint-config-prettier": "^8.6.0",
"mkdirp": "1",
"prettier": "^2.8.2",
- "tap": "^16.3.3",
+ "tap": "^16.3.7",
"ts-node": "^10.9.1",
"typedoc": "^0.23.21",
"typescript": "^4.9.3"
diff --git a/deps/npm/node_modules/semver/README.md b/deps/npm/node_modules/semver/README.md
index 33c762cb225dcb..043bdaed6b5fc3 100644
--- a/deps/npm/node_modules/semver/README.md
+++ b/deps/npm/node_modules/semver/README.md
@@ -159,7 +159,9 @@ of primitive `operators` is:
For example, the comparator `>=1.2.7` would match the versions
`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
+or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and
+would match the versions `2.0.0` and `3.1.0`, but not the versions
+`1.0.1` or `1.1.0`.
Comparators can be joined by whitespace to form a `comparator set`,
which is satisfied by the **intersection** of all of the comparators
diff --git a/deps/npm/node_modules/semver/classes/range.js b/deps/npm/node_modules/semver/classes/range.js
index 53c2540fd012ef..7e7c41410cbfdd 100644
--- a/deps/npm/node_modules/semver/classes/range.js
+++ b/deps/npm/node_modules/semver/classes/range.js
@@ -38,7 +38,7 @@ class Range {
this.set = this.raw
.split('||')
// map the range to a 2d array of comparators
- .map(r => this.parseRange(r))
+ .map(r => this.parseRange(r.trim()))
// throw out any comparator lists that are empty
// this generally means that it was not a valid range, which is allowed
// in loose mode, but will still throw if the WHOLE range is invalid.
@@ -98,15 +98,18 @@ class Range {
const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
debug('hyphen replace', range)
+
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range)
// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
+ debug('tilde trim', range)
// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
+ debug('caret trim', range)
// At this point, the range is completely trimmed and
// ready to be split into comparators.
diff --git a/deps/npm/node_modules/semver/internal/constants.js b/deps/npm/node_modules/semver/internal/constants.js
index 25fab1ea01233b..94be1c570277a5 100644
--- a/deps/npm/node_modules/semver/internal/constants.js
+++ b/deps/npm/node_modules/semver/internal/constants.js
@@ -9,6 +9,10 @@ const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
const MAX_SAFE_COMPONENT_LENGTH = 16
+// Max safe length for a build identifier. The max length minus 6 characters for
+// the shortest version with a build 0.0.0+BUILD.
+const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
+
const RELEASE_TYPES = [
'major',
'premajor',
@@ -22,6 +26,7 @@ const RELEASE_TYPES = [
module.exports = {
MAX_LENGTH,
MAX_SAFE_COMPONENT_LENGTH,
+ MAX_SAFE_BUILD_LENGTH,
MAX_SAFE_INTEGER,
RELEASE_TYPES,
SEMVER_SPEC_VERSION,
diff --git a/deps/npm/node_modules/semver/internal/re.js b/deps/npm/node_modules/semver/internal/re.js
index f73ef1aa06263a..21150b3ec53b7d 100644
--- a/deps/npm/node_modules/semver/internal/re.js
+++ b/deps/npm/node_modules/semver/internal/re.js
@@ -1,4 +1,8 @@
-const { MAX_SAFE_COMPONENT_LENGTH } = require('./constants')
+const {
+ MAX_SAFE_COMPONENT_LENGTH,
+ MAX_SAFE_BUILD_LENGTH,
+ MAX_LENGTH,
+} = require('./constants')
const debug = require('./debug')
exports = module.exports = {}
@@ -9,16 +13,31 @@ const src = exports.src = []
const t = exports.t = {}
let R = 0
+const LETTERDASHNUMBER = '[a-zA-Z0-9-]'
+
+// Replace some greedy regex tokens to prevent regex dos issues. These regex are
+// used internally via the safeRe object since all inputs in this library get
+// normalized first to trim and collapse all extra whitespace. The original
+// regexes are exported for userland consumption and lower level usage. A
+// future breaking change could export the safer regex only with a note that
+// all input should have extra whitespace removed.
+const safeRegexReplacements = [
+ ['\\s', 1],
+ ['\\d', MAX_LENGTH],
+ [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
+]
+
+const makeSafeRegex = (value) => {
+ for (const [token, max] of safeRegexReplacements) {
+ value = value
+ .split(`${token}*`).join(`${token}{0,${max}}`)
+ .split(`${token}+`).join(`${token}{1,${max}}`)
+ }
+ return value
+}
+
const createToken = (name, value, isGlobal) => {
- // Replace all greedy whitespace to prevent regex dos issues. These regex are
- // used internally via the safeRe object since all inputs in this library get
- // normalized first to trim and collapse all extra whitespace. The original
- // regexes are exported for userland consumption and lower level usage. A
- // future breaking change could export the safer regex only with a note that
- // all input should have extra whitespace removed.
- const safe = value
- .split('\\s*').join('\\s{0,1}')
- .split('\\s+').join('\\s')
+ const safe = makeSafeRegex(value)
const index = R++
debug(name, index, value)
t[name] = index
@@ -34,13 +53,13 @@ const createToken = (name, value, isGlobal) => {
// A single `0`, or a non-zero digit followed by zero or more digits.
createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
-createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+')
+createToken('NUMERICIDENTIFIERLOOSE', '\\d+')
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
-createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*')
+createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`)
// ## Main Version
// Three dot-separated numeric identifiers.
@@ -75,7 +94,7 @@ createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
-createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+')
+createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`)
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 7d0aff3c03c270..c145eca2f6d125 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,6 +1,6 @@
{
"name": "semver",
- "version": "7.5.2",
+ "version": "7.5.4",
"description": "The semantic version parser used by npm.",
"main": "index.js",
"scripts": {
@@ -14,7 +14,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.15.1",
+ "@npmcli/template-oss": "4.17.0",
"tap": "^16.0.0"
},
"license": "ISC",
@@ -53,7 +53,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.15.1",
+ "version": "4.17.0",
"engines": ">=10",
"ciVersions": [
"10.0.0",
diff --git a/deps/npm/node_modules/supports-color/index.js b/deps/npm/node_modules/supports-color/index.js
index ca95e9f2202a6f..4ce0a2da8d2242 100644
--- a/deps/npm/node_modules/supports-color/index.js
+++ b/deps/npm/node_modules/supports-color/index.js
@@ -112,7 +112,7 @@ function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
}
if ('CI' in env) {
- if ('GITHUB_ACTIONS' in env) {
+ if ('GITHUB_ACTIONS' in env || 'GITEA_ACTIONS' in env) {
return 3;
}
diff --git a/deps/npm/node_modules/supports-color/package.json b/deps/npm/node_modules/supports-color/package.json
index eb6011c6bcdc64..738684722643c9 100644
--- a/deps/npm/node_modules/supports-color/package.json
+++ b/deps/npm/node_modules/supports-color/package.json
@@ -1,6 +1,6 @@
{
"name": "supports-color",
- "version": "9.3.1",
+ "version": "9.4.0",
"description": "Detect whether a terminal supports color",
"license": "MIT",
"repository": "chalk/supports-color",
@@ -20,7 +20,7 @@
},
"scripts": {
"//test": "xo && ava && tsd",
- "test": "xo && tsd"
+ "test": "tsd"
},
"files": [
"index.js",
@@ -51,11 +51,10 @@
"16m"
],
"devDependencies": {
- "@types/node": "^16.11.7",
- "ava": "^3.15.0",
+ "@types/node": "^20.3.2",
+ "ava": "^5.3.1",
"import-fresh": "^3.3.0",
"tsd": "^0.18.0",
- "typescript": "^4.4.3",
- "xo": "^0.49.0"
+ "xo": "^0.54.2"
}
}
diff --git a/deps/npm/package.json b/deps/npm/package.json
index c6ab8029946fd1..6e719a073893b0 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "9.8.0",
+ "version": "9.8.1",
"name": "npm",
"description": "a package manager for JavaScript",
"workspaces": [
@@ -54,13 +54,15 @@
"@isaacs/string-locale-compare": "^1.1.0",
"@npmcli/arborist": "^6.3.0",
"@npmcli/config": "^6.2.1",
+ "@npmcli/fs": "^3.1.0",
"@npmcli/map-workspaces": "^3.0.4",
- "@npmcli/package-json": "^4.0.0",
+ "@npmcli/package-json": "^4.0.1",
+ "@npmcli/promise-spawn": "^6.0.2",
"@npmcli/run-script": "^6.0.2",
"abbrev": "^2.0.0",
"archy": "~1.0.0",
"cacache": "^17.1.3",
- "chalk": "^5.2.0",
+ "chalk": "^5.3.0",
"ci-info": "^3.8.0",
"cli-columns": "^4.0.0",
"cli-table3": "^0.6.3",
@@ -76,7 +78,7 @@
"json-parse-even-better-errors": "^3.0.0",
"libnpmaccess": "^7.0.2",
"libnpmdiff": "^5.0.19",
- "libnpmexec": "^6.0.2",
+ "libnpmexec": "^6.0.3",
"libnpmfund": "^4.0.19",
"libnpmhook": "^9.0.3",
"libnpmorg": "^5.0.4",
@@ -86,7 +88,7 @@
"libnpmteam": "^5.0.3",
"libnpmversion": "^4.0.2",
"make-fetch-happen": "^11.1.1",
- "minimatch": "^9.0.0",
+ "minimatch": "^9.0.3",
"minipass": "^5.0.0",
"minipass-pipeline": "^1.2.4",
"ms": "^2.1.2",
@@ -106,10 +108,10 @@
"proc-log": "^3.0.0",
"qrcode-terminal": "^0.12.0",
"read": "^2.1.0",
- "semver": "^7.5.2",
+ "semver": "^7.5.4",
"sigstore": "^1.7.0",
"ssri": "^10.0.4",
- "supports-color": "^9.3.1",
+ "supports-color": "^9.4.0",
"tar": "^6.1.15",
"text-table": "~0.2.0",
"tiny-relative-date": "^1.3.0",
@@ -122,8 +124,10 @@
"@isaacs/string-locale-compare",
"@npmcli/arborist",
"@npmcli/config",
+ "@npmcli/fs",
"@npmcli/map-workspaces",
"@npmcli/package-json",
+ "@npmcli/promise-spawn",
"@npmcli/run-script",
"abbrev",
"archy",
@@ -188,13 +192,11 @@
],
"devDependencies": {
"@npmcli/docs": "^1.0.0",
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/fs": "^3.1.0",
+ "@npmcli/eslint-config": "^4.0.2",
"@npmcli/git": "^4.1.0",
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/promise-spawn": "^6.0.2",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"@tufjs/repo-mock": "^1.3.1",
"diff": "^5.1.0",
"licensee": "^10.0.0",
@@ -247,7 +249,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"content": "./scripts/template-oss/root.js"
},
"license": "Artistic-2.0",
diff --git a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
index d88244d7a15460..7a5c2ddcc3882b 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
@@ -245,6 +245,17 @@ exports[`test/lib/commands/publish.js TAP no auth dry-run > must match snapshot
exports[`test/lib/commands/publish.js TAP no auth dry-run > warns about auth being needed 1`] = `
Array [
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ ),
+ ],
Array [
"",
"This command requires you to be logged in to https://registry.npmjs.org/ (dry-run)",
@@ -416,6 +427,53 @@ exports[`test/lib/commands/publish.js TAP workspaces all workspaces - color > al
exports[`test/lib/commands/publish.js TAP workspaces all workspaces - color > warns about skipped private workspace in color 1`] = `
Array [
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ "repository" was changed from a string to an object
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ "repository" was changed from a string to an object
+ "repository.url" was normalized to "git+https://github.com/npm/workspace-b.git"
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ ),
+ ],
Array [
"publish",
"Skipping workspace \\u001b[32mworkspace-p\\u001b[39m, marked as \\u001b[1mprivate\\u001b[22m",
@@ -431,6 +489,53 @@ exports[`test/lib/commands/publish.js TAP workspaces all workspaces - no color >
exports[`test/lib/commands/publish.js TAP workspaces all workspaces - no color > warns about skipped private workspace 1`] = `
Array [
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ "repository" was changed from a string to an object
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ "repository" was changed from a string to an object
+ "repository.url" was normalized to "git+https://github.com/npm/workspace-b.git"
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ ),
+ ],
+ Array [
+ "publish",
+ "npm auto-corrected some errors in your package.json when publishing. Please run \\"npm pkg fix\\" to address these errors.",
+ ],
+ Array [
+ "publish",
+ String(
+ errors corrected:
+ Removed invalid "scripts"
+ ),
+ ],
Array [
"publish",
"Skipping workspace workspace-p, marked as private",
diff --git a/deps/npm/test/lib/commands/exec.js b/deps/npm/test/lib/commands/exec.js
index 2fd11f40379f1f..07a3e6ebd8ed95 100644
--- a/deps/npm/test/lib/commands/exec.js
+++ b/deps/npm/test/lib/commands/exec.js
@@ -129,3 +129,38 @@ t.test('workspaces', async t => {
const exists = await fs.stat(path.join(npm.prefix, 'workspace-a', 'npm-exec-test-success'))
t.ok(exists.isFile(), 'bin ran, creating file inside workspace')
})
+
+t.test('npx --no-install @npmcli/npx-test', async t => {
+ const registry = new MockRegistry({
+ tap: t,
+ registry: 'https://registry.npmjs.org/',
+ })
+
+ const manifest = registry.manifest({ name: '@npmcli/npx-test' })
+ manifest.versions['1.0.0'].bin = { 'npx-test': 'index.js' }
+
+ const { npm } = await loadMockNpm(t, {
+ config: {
+ audit: false,
+ yes: false,
+ },
+ prefixDir: {
+ 'npm-exec-test': {
+ 'package.json': JSON.stringify(manifest),
+ 'index.js': `#!/usr/bin/env node
+ require('fs').writeFileSync('npm-exec-test-success', '')`,
+ },
+ },
+ })
+
+ try {
+ await npm.exec('exec', ['@npmcli/npx-test'])
+ t.fail('Expected error was not thrown')
+ } catch (error) {
+ t.match(
+ error.message,
+ 'npx canceled due to missing packages and no YES option: ',
+ 'Expected error message thrown'
+ )
+ }
+})
From 65d23940bf13fcc5f9e1030b50006289a9ac7e98 Mon Sep 17 00:00:00 2001
From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com>
Date: Sun, 23 Jul 2023 13:22:35 +0300
Subject: [PATCH 041/157] test_runner: fix async callback in describe not
awaited
PR-URL: https://github.com/nodejs/node/pull/48856
Reviewed-By: Moshe Atlow
Reviewed-By: Chemi Atlow
---
lib/internal/test_runner/test.js | 21 ++++++----
.../test-runner/output/describe_it.js | 19 +++++++++
.../test-runner/output/describe_it.snapshot | 39 ++++++++++++++++---
3 files changed, 67 insertions(+), 12 deletions(-)
diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js
index 9a333e1457fbc0..becb4c07d7cf83 100644
--- a/lib/internal/test_runner/test.js
+++ b/lib/internal/test_runner/test.js
@@ -13,6 +13,7 @@ const {
ObjectSeal,
PromisePrototypeThen,
PromiseResolve,
+ SafePromisePrototypeFinally,
ReflectApply,
RegExpPrototypeExec,
SafeMap,
@@ -789,17 +790,23 @@ class Suite extends Test {
const { ctx, args } = this.getRunArgs();
const runArgs = [this.fn, ctx];
ArrayPrototypePushApply(runArgs, args);
- this.buildSuite = PromisePrototypeThen(
- PromiseResolve(ReflectApply(this.runInAsyncScope, this, runArgs)),
- undefined,
- (err) => {
- this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure));
- });
+ this.buildSuite = SafePromisePrototypeFinally(
+ PromisePrototypeThen(
+ PromiseResolve(ReflectApply(this.runInAsyncScope, this, runArgs)),
+ undefined,
+ (err) => {
+ this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure));
+ }),
+ () => {
+ this.buildPhaseFinished = true;
+ },
+ );
} catch (err) {
this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure));
+
+ this.buildPhaseFinished = true;
}
this.fn = () => {};
- this.buildPhaseFinished = true;
}
getRunArgs() {
diff --git a/test/fixtures/test-runner/output/describe_it.js b/test/fixtures/test-runner/output/describe_it.js
index c6d3f9c1b72fb3..0b89e1a11112b1 100644
--- a/test/fixtures/test-runner/output/describe_it.js
+++ b/test/fixtures/test-runner/output/describe_it.js
@@ -375,3 +375,22 @@ describe('rejected thenable', () => {
},
};
});
+
+describe("async describe function", async () => {
+ await null;
+
+ await it("it inside describe 1", async () => {
+ await null
+ });
+ await it("it inside describe 2", async () => {
+ await null;
+ });
+
+ describe("inner describe", async () => {
+ await null;
+
+ it("it inside inner describe", async () => {
+ await null;
+ });
+ });
+});
diff --git a/test/fixtures/test-runner/output/describe_it.snapshot b/test/fixtures/test-runner/output/describe_it.snapshot
index e085ff9535ec70..c5b4610d522e43 100644
--- a/test/fixtures/test-runner/output/describe_it.snapshot
+++ b/test/fixtures/test-runner/output/describe_it.snapshot
@@ -635,8 +635,37 @@ not ok 59 - rejected thenable
stack: |-
*
...
+# Subtest: async describe function
+ # Subtest: it inside describe 1
+ ok 1 - it inside describe 1
+ ---
+ duration_ms: *
+ ...
+ # Subtest: it inside describe 2
+ ok 2 - it inside describe 2
+ ---
+ duration_ms: *
+ ...
+ # Subtest: inner describe
+ # Subtest: it inside inner describe
+ ok 1 - it inside inner describe
+ ---
+ duration_ms: *
+ ...
+ 1..1
+ ok 3 - inner describe
+ ---
+ duration_ms: *
+ type: 'suite'
+ ...
+ 1..3
+ok 60 - async describe function
+ ---
+ duration_ms: *
+ type: 'suite'
+ ...
# Subtest: invalid subtest fail
-not ok 60 - invalid subtest fail
+not ok 61 - invalid subtest fail
---
duration_ms: *
failureType: 'parentAlreadyFinished'
@@ -645,16 +674,16 @@ not ok 60 - invalid subtest fail
stack: |-
*
...
-1..60
+1..61
# Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event.
# Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event.
# Warning: Test "immediate throw - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event.
# Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event.
# Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event.
# Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event.
-# tests 67
-# suites 9
-# pass 29
+# tests 70
+# suites 11
+# pass 32
# fail 19
# cancelled 4
# skipped 10
From 53ee98566b3a429368e243e57caa5f94d901fe97 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sun, 23 Jul 2023 21:35:38 +0200
Subject: [PATCH 042/157] permission: move PrintTree into unnamed namespace
This function is not declared outside of fs_permission.cc and thus
should not be visible outside the file during the linking stage.
PR-URL: https://github.com/nodejs/node/pull/48874
Reviewed-By: Rafael Gonzaga
Reviewed-By: Debadree Chatterjee
---
src/permission/fs_permission.cc | 57 +++++++++++++++++----------------
1 file changed, 29 insertions(+), 28 deletions(-)
diff --git a/src/permission/fs_permission.cc b/src/permission/fs_permission.cc
index a02f95ea55f3a2..91c63dff6582a8 100644
--- a/src/permission/fs_permission.cc
+++ b/src/permission/fs_permission.cc
@@ -67,52 +67,53 @@ bool is_tree_granted(node::permission::FSPermission::RadixTree* granted_tree,
return granted_tree->Lookup(param, true);
}
-} // namespace
-
-namespace node {
-
-namespace permission {
-
-void PrintTree(const FSPermission::RadixTree::Node* node, size_t spaces = 0) {
+void PrintTree(const node::permission::FSPermission::RadixTree::Node* node,
+ size_t spaces = 0) {
std::string whitespace(spaces, ' ');
if (node == nullptr) {
return;
}
if (node->wildcard_child != nullptr) {
- per_process::Debug(DebugCategory::PERMISSION_MODEL,
- "%s Wildcard: %s\n",
- whitespace,
- node->prefix);
+ node::per_process::Debug(node::DebugCategory::PERMISSION_MODEL,
+ "%s Wildcard: %s\n",
+ whitespace,
+ node->prefix);
} else {
- per_process::Debug(DebugCategory::PERMISSION_MODEL,
- "%s Prefix: %s\n",
- whitespace,
- node->prefix);
+ node::per_process::Debug(node::DebugCategory::PERMISSION_MODEL,
+ "%s Prefix: %s\n",
+ whitespace,
+ node->prefix);
if (node->children.size()) {
size_t child = 0;
for (const auto& pair : node->children) {
++child;
- per_process::Debug(DebugCategory::PERMISSION_MODEL,
- "%s Child(%s): %s\n",
- whitespace,
- child,
- std::string(1, pair.first));
+ node::per_process::Debug(node::DebugCategory::PERMISSION_MODEL,
+ "%s Child(%s): %s\n",
+ whitespace,
+ child,
+ std::string(1, pair.first));
PrintTree(pair.second, spaces + 2);
}
- per_process::Debug(DebugCategory::PERMISSION_MODEL,
- "%s End of tree - child(%s)\n",
- whitespace,
- child);
+ node::per_process::Debug(node::DebugCategory::PERMISSION_MODEL,
+ "%s End of tree - child(%s)\n",
+ whitespace,
+ child);
} else {
- per_process::Debug(DebugCategory::PERMISSION_MODEL,
- "%s End of tree: %s\n",
- whitespace,
- node->prefix);
+ node::per_process::Debug(node::DebugCategory::PERMISSION_MODEL,
+ "%s End of tree: %s\n",
+ whitespace,
+ node->prefix);
}
}
}
+} // namespace
+
+namespace node {
+
+namespace permission {
+
// allow = '*'
// allow = '/tmp/,/home/example.js'
void FSPermission::Apply(const std::string& allow, PermissionScope scope) {
From 015c4f788d67a36ecb27fe698aa2d0e618a46db9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Sun, 23 Jul 2023 21:51:38 +0200
Subject: [PATCH 043/157] node-api: avoid macro redefinition
Even though the redefinition complies with the C standard because the
second definition is "effectively the same" as the first definition,
it's best to avoid any redefinition.
Refs: https://github.com/nodejs/node/pull/28237
Refs: https://github.com/nodejs/node/pull/30006
PR-URL: https://github.com/nodejs/node/pull/48879
Reviewed-By: Michael Dawson
Reviewed-By: Vladimir Morozov
Reviewed-By: Luigi Pinca
---
src/js_native_api_v8.h | 8 --------
1 file changed, 8 deletions(-)
diff --git a/src/js_native_api_v8.h b/src/js_native_api_v8.h
index f7646778311bfd..4f0fb8b3d152f7 100644
--- a/src/js_native_api_v8.h
+++ b/src/js_native_api_v8.h
@@ -268,14 +268,6 @@ inline napi_status napi_set_last_error(napi_env env,
} \
} while (0)
-#define RETURN_STATUS_IF_FALSE_WITH_PREAMBLE(env, condition, status) \
- do { \
- if (!(condition)) { \
- return napi_set_last_error( \
- (env), try_catch.HasCaught() ? napi_pending_exception : (status)); \
- } \
- } while (0)
-
#define CHECK_MAYBE_EMPTY_WITH_PREAMBLE(env, maybe, status) \
RETURN_STATUS_IF_FALSE_WITH_PREAMBLE((env), !((maybe).IsEmpty()), (status))
From b12c3b5240768e29a5dbe7af7b10306a5cd5d2ad Mon Sep 17 00:00:00 2001
From: Antoine du Hamel
Date: Mon, 24 Jul 2023 09:47:47 +0200
Subject: [PATCH 044/157] url: ensure getter access do not mutate observable
symbols
PR-URL: https://github.com/nodejs/node/pull/48897
Refs: https://github.com/nodejs/node/pull/48891
Refs: https://github.com/nodejs/node/issues/48886
Reviewed-By: Yagiz Nizipli
Reviewed-By: Moshe Atlow
---
test/parallel/test-whatwg-url-custom-searchparams.js | 2 ++
1 file changed, 2 insertions(+)
diff --git a/test/parallel/test-whatwg-url-custom-searchparams.js b/test/parallel/test-whatwg-url-custom-searchparams.js
index 0b2087ac246313..75fa1779bdeb45 100644
--- a/test/parallel/test-whatwg-url-custom-searchparams.js
+++ b/test/parallel/test-whatwg-url-custom-searchparams.js
@@ -16,7 +16,9 @@ const normalizedValues = ['a', '1', 'true', 'undefined', 'null', '\uFFFD',
'[object Object]'];
const m = new URL('http://example.org');
+const ownSymbolsBeforeGetterAccess = Object.getOwnPropertySymbols(m);
const sp = m.searchParams;
+assert.deepStrictEqual(Object.getOwnPropertySymbols(m), ownSymbolsBeforeGetterAccess);
assert(sp);
assert.strictEqual(sp.toString(), '');
From 045e3c549a28ac913b60adb77d3747603e47dad9 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Mon, 24 Jul 2023 15:44:26 -0400
Subject: [PATCH 045/157] doc: add ver of 18.x where Node-api 9 is supported
Signed-off-by: Michael Dawson
PR-URL: https://github.com/nodejs/node/pull/48876
Reviewed-By: Luigi Pinca
Reviewed-By: Chengzhong Wu
---
doc/api/n-api.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index 10f19ee2f8cc65..786df00f9a7960 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -259,7 +259,7 @@ information is in the latest API documentation in:
9
-
20.3.0+, 21.0.0 and all later versions
+
v18.17.0+, 20.3.0+, 21.0.0 and all later versions
8
From 742597b14a1e95f35830d948918dcecb49012097 Mon Sep 17 00:00:00 2001
From: Paolo Insogna
Date: Mon, 24 Jul 2023 22:55:19 +0200
Subject: [PATCH 046/157] http: start connections checking interval on listen
Co-authored-by: Luigi Pinca
PR-URL: https://github.com/nodejs/node/pull/48611
Reviewed-By: Matteo Collina
Reviewed-By: Marco Ippolito
Reviewed-By: Luigi Pinca
Reviewed-By: Rafael Gonzaga
---
lib/_http_server.js | 21 ++++++++++----
lib/https.js | 3 +-
...t-http-server-connections-checking-leak.js | 24 +++++++++++++++
...-https-server-connections-checking-leak.js | 29 +++++++++++++++++++
4 files changed, 71 insertions(+), 6 deletions(-)
create mode 100644 test/parallel/test-http-server-connections-checking-leak.js
create mode 100644 test/parallel/test-https-server-connections-checking-leak.js
diff --git a/lib/_http_server.js b/lib/_http_server.js
index 0242e7a089dd6f..c62ea17599512f 100644
--- a/lib/_http_server.js
+++ b/lib/_http_server.js
@@ -500,14 +500,16 @@ function storeHTTPOptions(options) {
}
}
-function setupConnectionsTracking(server) {
+function setupConnectionsTracking() {
// Start connection handling
- server[kConnections] = new ConnectionsList();
+ if (!this[kConnections]) {
+ this[kConnections] = new ConnectionsList();
+ }
// This checker is started without checking whether any headersTimeout or requestTimeout is non zero
// otherwise it would not be started if such timeouts are modified after createServer.
- server[kConnectionsCheckingInterval] =
- setInterval(checkConnections.bind(server), server.connectionsCheckingInterval).unref();
+ this[kConnectionsCheckingInterval] =
+ setInterval(checkConnections.bind(this), this.connectionsCheckingInterval).unref();
}
function httpServerPreClose(server) {
@@ -545,11 +547,12 @@ function Server(options, requestListener) {
this.httpAllowHalfOpen = false;
this.on('connection', connectionListener);
+ this.on('listening', setupConnectionsTracking);
this.timeout = 0;
this.maxHeadersCount = null;
this.maxRequestsPerSocket = 0;
- setupConnectionsTracking(this);
+
this[kUniqueHeaders] = parseUniqueHeadersOption(options.uniqueHeaders);
}
ObjectSetPrototypeOf(Server.prototype, net.Server.prototype);
@@ -565,6 +568,10 @@ Server.prototype[SymbolAsyncDispose] = async function() {
};
Server.prototype.closeAllConnections = function() {
+ if (!this[kConnections]) {
+ return;
+ }
+
const connections = this[kConnections].all();
for (let i = 0, l = connections.length; i < l; i++) {
@@ -573,6 +580,10 @@ Server.prototype.closeAllConnections = function() {
};
Server.prototype.closeIdleConnections = function() {
+ if (!this[kConnections]) {
+ return;
+ }
+
const connections = this[kConnections].idle();
for (let i = 0, l = connections.length; i < l; i++) {
diff --git a/lib/https.js b/lib/https.js
index d8b42c85493f7e..70ffa73ff1996b 100644
--- a/lib/https.js
+++ b/lib/https.js
@@ -96,8 +96,9 @@ function Server(opts, requestListener) {
this.timeout = 0;
this.maxHeadersCount = null;
- setupConnectionsTracking(this);
+ this.on('listening', setupConnectionsTracking);
}
+
ObjectSetPrototypeOf(Server.prototype, tls.Server.prototype);
ObjectSetPrototypeOf(Server, tls.Server);
diff --git a/test/parallel/test-http-server-connections-checking-leak.js b/test/parallel/test-http-server-connections-checking-leak.js
new file mode 100644
index 00000000000000..e28cf117c65f87
--- /dev/null
+++ b/test/parallel/test-http-server-connections-checking-leak.js
@@ -0,0 +1,24 @@
+'use strict';
+
+// Flags: --expose-gc
+
+// Check that creating a server without listening does not leak resources.
+
+require('../common');
+const onGC = require('../common/ongc');
+const Countdown = require('../common/countdown');
+
+const http = require('http');
+const max = 100;
+
+// Note that Countdown internally calls common.mustCall, that's why it's not done here.
+const countdown = new Countdown(max, () => {});
+
+for (let i = 0; i < max; i++) {
+ const server = http.createServer((req, res) => {});
+ onGC(server, { ongc: countdown.dec.bind(countdown) });
+}
+
+setImmediate(() => {
+ global.gc();
+});
diff --git a/test/parallel/test-https-server-connections-checking-leak.js b/test/parallel/test-https-server-connections-checking-leak.js
new file mode 100644
index 00000000000000..3e7c45e4660ed4
--- /dev/null
+++ b/test/parallel/test-https-server-connections-checking-leak.js
@@ -0,0 +1,29 @@
+'use strict';
+
+// Flags: --expose-gc
+
+// Check that creating a server without listening does not leak resources.
+
+const common = require('../common');
+
+if (!common.hasCrypto) {
+ common.skip('missing crypto');
+}
+
+const onGC = require('../common/ongc');
+const Countdown = require('../common/countdown');
+
+const https = require('https');
+const max = 100;
+
+// Note that Countdown internally calls common.mustCall, that's why it's not done here.
+const countdown = new Countdown(max, () => {});
+
+for (let i = 0; i < max; i++) {
+ const server = https.createServer((req, res) => {});
+ onGC(server, { ongc: countdown.dec.bind(countdown) });
+}
+
+setImmediate(() => {
+ global.gc();
+});
From 0cd2393bd9af11c396e01f068fd77e666946f5fe Mon Sep 17 00:00:00 2001
From: "Node.js GitHub Bot"
Date: Tue, 25 Jul 2023 01:56:53 +0100
Subject: [PATCH 047/157] tools: update lint-md-dependencies to rollup@3.26.3
PR-URL: https://github.com/nodejs/node/pull/48888
Reviewed-By: Luigi Pinca
Reviewed-By: Moshe Atlow
Reviewed-By: Rich Trott
---
tools/lint-md/package-lock.json | 8 ++++----
tools/lint-md/package.json | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/tools/lint-md/package-lock.json b/tools/lint-md/package-lock.json
index b783d977e6d5da..11659dfec5197b 100644
--- a/tools/lint-md/package-lock.json
+++ b/tools/lint-md/package-lock.json
@@ -18,7 +18,7 @@
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.3",
"@rollup/plugin-node-resolve": "^15.1.0",
- "rollup": "^3.26.2",
+ "rollup": "^3.26.3",
"rollup-plugin-cleanup": "^3.2.1"
}
},
@@ -2230,9 +2230,9 @@
}
},
"node_modules/rollup": {
- "version": "3.26.2",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.26.2.tgz",
- "integrity": "sha512-6umBIGVz93er97pMgQO08LuH3m6PUb3jlDUUGFsNJB6VgTCUaDFpupf5JfU30529m/UKOgmiX+uY6Sx8cOYpLA==",
+ "version": "3.26.3",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.26.3.tgz",
+ "integrity": "sha512-7Tin0C8l86TkpcMtXvQu6saWH93nhG3dGQ1/+l5V2TDMceTxO7kDiK6GzbfLWNNxqJXm591PcEZUozZm51ogwQ==",
"dev": true,
"bin": {
"rollup": "dist/bin/rollup"
diff --git a/tools/lint-md/package.json b/tools/lint-md/package.json
index 63c117a03d3783..cbf5f6c312040b 100644
--- a/tools/lint-md/package.json
+++ b/tools/lint-md/package.json
@@ -16,7 +16,7 @@
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.3",
"@rollup/plugin-node-resolve": "^15.1.0",
- "rollup": "^3.26.2",
+ "rollup": "^3.26.3",
"rollup-plugin-cleanup": "^3.2.1"
}
}
From 5e4730858da24d73cc24acb72a75eb7140961cee Mon Sep 17 00:00:00 2001
From: Guido Penta <119898224+GuidoPenta@users.noreply.github.com>
Date: Tue, 25 Jul 2023 09:19:44 +0200
Subject: [PATCH 048/157] doc: improve requireHostHeader
PR-URL: https://github.com/nodejs/node/pull/48860
Reviewed-By: Paolo Insogna
Reviewed-By: Matteo Collina
Reviewed-By: Marco Ippolito
Reviewed-By: Luigi Pinca
---
doc/api/http.md | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/doc/api/http.md b/doc/api/http.md
index a0437f329bb148..490b6bd1579172 100644
--- a/doc/api/http.md
+++ b/doc/api/http.md
@@ -3282,9 +3282,10 @@ changes:
the entire request from the client.
See [`server.requestTimeout`][] for more information.
**Default:** `300000`.
- * `requireHostHeader` {boolean} It forces the server to respond with
- a 400 (Bad Request) status code to any HTTP/1.1 request message
- that lacks a Host header (as mandated by the specification).
+ * `requireHostHeader` {boolean} If set to `true`, it forces the server to
+ respond with a 400 (Bad Request) status code to any HTTP/1.1
+ request message that lacks a Host header
+ (as mandated by the specification).
**Default:** `true`.
* `ServerResponse` {http.ServerResponse} Specifies the `ServerResponse` class
to be used. Useful for extending the original `ServerResponse`. **Default:**
From 092f9fe92a6baacf3fdb72d4c9751efb476969e5 Mon Sep 17 00:00:00 2001
From: Ardi_Nugraha <33378542+0xArdi-N@users.noreply.github.com>
Date: Tue, 25 Jul 2023 20:34:16 +0700
Subject: [PATCH 049/157] doc: change duration to duration_ms on test
documentation
PR-URL: https://github.com/nodejs/node/pull/48892
Fixes: https://github.com/nodejs/node/issues/48887
Reviewed-By: Antoine du Hamel
Reviewed-By: Luigi Pinca
Reviewed-By: Moshe Atlow
Reviewed-By: Colin Ihrig
Reviewed-By: Chemi Atlow
---
doc/api/test.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/api/test.md b/doc/api/test.md
index e86e8999316b20..4b0c298e7f21b4 100644
--- a/doc/api/test.md
+++ b/doc/api/test.md
@@ -2029,7 +2029,7 @@ Emitted when a test is enqueued for execution.
* `data` {Object}
* `details` {Object} Additional execution metadata.
- * `duration` {number} The duration of the test in milliseconds.
+ * `duration_ms` {number} The duration of the test in milliseconds.
* `error` {Error} An error wrapping the error thrown by the test.
* `cause` {Error} The actual error thrown by the test.
* `file` {string|undefined} The path of the test file,
@@ -2046,7 +2046,7 @@ Emitted when a test fails.
* `data` {Object}
* `details` {Object} Additional execution metadata.
- * `duration` {number} The duration of the test in milliseconds.
+ * `duration_ms` {number} The duration of the test in milliseconds.
* `file` {string|undefined} The path of the test file,
`undefined` if test was run through the REPL.
* `name` {string} The test name.
From 4b0e50501eb358561aa596d5d9e5c5fb50c9d915 Mon Sep 17 00:00:00 2001
From: "Node.js GitHub Bot"
Date: Tue, 25 Jul 2023 20:33:01 +0100
Subject: [PATCH 050/157] deps: update ada to 2.6.0
PR-URL: https://github.com/nodejs/node/pull/48896
Reviewed-By: Yagiz Nizipli
Reviewed-By: Antoine du Hamel
Reviewed-By: Luigi Pinca
Reviewed-By: Jiawen Geng
---
deps/ada/LICENSE-MIT | 2 +-
deps/ada/ada.cpp | 219 +++++-----
deps/ada/ada.h | 405 ++++++++++++++++--
deps/ada/ada_c.h | 4 +
.../maintaining/maintaining-dependencies.md | 6 +-
5 files changed, 485 insertions(+), 151 deletions(-)
diff --git a/deps/ada/LICENSE-MIT b/deps/ada/LICENSE-MIT
index 9f3e50e0db7856..bd2abacfc1dd51 100644
--- a/deps/ada/LICENSE-MIT
+++ b/deps/ada/LICENSE-MIT
@@ -1,4 +1,4 @@
-Copyright 2023 Ada authors
+Copyright 2023 Yagiz Nizipli and Daniel Lemire
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
diff --git a/deps/ada/ada.cpp b/deps/ada/ada.cpp
index 855d5b9360392d..ce4e6302acbe87 100644
--- a/deps/ada/ada.cpp
+++ b/deps/ada/ada.cpp
@@ -1,4 +1,4 @@
-/* auto-generated on 2023-06-03 12:40:57 -0400. Do not edit! */
+/* auto-generated on 2023-07-23 15:03:22 -0400. Do not edit! */
/* begin file src/ada.cpp */
#include "ada.h"
/* begin file src/checkers.cpp */
@@ -2829,8 +2829,6 @@ std::u32string map(std::u32string_view input) {
break;
case 2:
return error; // disallowed
- break;
-
// case 3 :
default:
// We have a mapping
@@ -7750,7 +7748,7 @@ const char32_t composition_data[1883] = {
namespace ada::idna {
// See
-// https://github.composition_count/uni-algo/uni-algo/blob/c612968c5ed3ace39bde4c894c24286c5f2c7fe2/include/uni_algo/impl/impl_norm.h#L467
+// https://github.com/uni-algo/uni-algo/blob/c612968c5ed3ace39bde4c894c24286c5f2c7fe2/include/uni_algo/impl/impl_norm.h#L467
constexpr char32_t hangul_sbase = 0xAC00;
constexpr char32_t hangul_tbase = 0x11A7;
constexpr char32_t hangul_vbase = 0x1161;
@@ -9794,10 +9792,11 @@ ADA_POP_DISABLE_WARNINGS
namespace ada::unicode {
+constexpr uint64_t broadcast(uint8_t v) noexcept {
+ return 0x101010101010101ull * v;
+}
+
constexpr bool to_lower_ascii(char* input, size_t length) noexcept {
- auto broadcast = [](uint8_t v) -> uint64_t {
- return 0x101010101010101ull * v;
- };
uint64_t broadcast_80 = broadcast(0x80);
uint64_t broadcast_Ap = broadcast(128 - 'A');
uint64_t broadcast_Zp = broadcast(128 - 'Z' - 1);
@@ -9862,9 +9861,9 @@ ada_really_inline bool has_tabs_or_newline(
_mm_cmpeq_epi8(word, mask3));
}
if (i < user_input.size()) {
- uint8_t buffer[16]{};
+ alignas(16) uint8_t buffer[16]{};
memcpy(buffer, user_input.data() + i, user_input.size() - i);
- __m128i word = _mm_loadu_si128((const __m128i*)buffer);
+ __m128i word = _mm_load_si128((const __m128i*)buffer);
running = _mm_or_si128(
_mm_or_si128(running, _mm_or_si128(_mm_cmpeq_epi8(word, mask1),
_mm_cmpeq_epi8(word, mask2))),
@@ -9878,9 +9877,6 @@ ada_really_inline bool has_tabs_or_newline(
auto has_zero_byte = [](uint64_t v) {
return ((v - 0x0101010101010101) & ~(v)&0x8080808080808080);
};
- auto broadcast = [](uint8_t v) -> uint64_t {
- return 0x101010101010101ull * v;
- };
size_t i = 0;
uint64_t mask1 = broadcast('\r');
uint64_t mask2 = broadcast('\n');
@@ -10542,8 +10538,8 @@ ada_really_inline bool shorten_path(std::string& path,
ada::scheme::type type) noexcept {
size_t first_delimiter = path.find_first_of('/', 1);
- // Let path be url’s path.
- // If url’s scheme is "file", path’s size is 1, and path[0] is a normalized
+ // Let path be url's path.
+ // If url's scheme is "file", path's size is 1, and path[0] is a normalized
// Windows drive letter, then return.
if (type == ada::scheme::type::FILE &&
first_delimiter == std::string_view::npos && !path.empty()) {
@@ -10553,7 +10549,7 @@ ada_really_inline bool shorten_path(std::string& path,
}
}
- // Remove path’s last item, if any.
+ // Remove path's last item, if any.
size_t last_delimiter = path.rfind('/');
if (last_delimiter != std::string::npos) {
path.erase(last_delimiter);
@@ -10567,8 +10563,8 @@ ada_really_inline bool shorten_path(std::string_view& path,
ada::scheme::type type) noexcept {
size_t first_delimiter = path.find_first_of('/', 1);
- // Let path be url’s path.
- // If url’s scheme is "file", path’s size is 1, and path[0] is a normalized
+ // Let path be url's path.
+ // If url's scheme is "file", path's size is 1, and path[0] is a normalized
// Windows drive letter, then return.
if (type == ada::scheme::type::FILE &&
first_delimiter == std::string_view::npos && !path.empty()) {
@@ -10578,7 +10574,7 @@ ada_really_inline bool shorten_path(std::string_view& path,
}
}
- // Remove path’s last item, if any.
+ // Remove path's last item, if any.
if (!path.empty()) {
size_t slash_loc = path.rfind('/');
if (slash_loc != std::string_view::npos) {
@@ -10991,7 +10987,7 @@ ada_really_inline void parse_prepared_path(std::string_view input,
}
// Otherwise, if path_buffer is not a single-dot path segment, then:
else if (!unicode::is_single_dot_path_segment(path_buffer)) {
- // If url’s scheme is "file", url’s path is empty, and path_buffer is a
+ // If url's scheme is "file", url's path is empty, and path_buffer is a
// Windows drive letter, then replace the second code point in
// path_buffer with U+003A (:).
if (type == ada::scheme::type::FILE && path.empty() &&
@@ -11002,7 +10998,7 @@ ada_really_inline void parse_prepared_path(std::string_view input,
path_buffer.remove_prefix(2);
path.append(path_buffer);
} else {
- // Append path_buffer to url’s path.
+ // Append path_buffer to url's path.
path += '/';
path.append(path_buffer);
}
@@ -11299,7 +11295,7 @@ bool url::parse_ipv6(std::string_view input) {
uint16_t value = 0, length = 0;
// While length is less than 4 and c is an ASCII hex digit,
- // set value to value × 0x10 + c interpreted as hexadecimal number, and
+ // set value to value times 0x10 + c interpreted as hexadecimal number, and
// increase pointer and length by 1.
while (length < 4 && pointer != input.end() &&
unicode::is_ascii_hex_digit(*pointer)) {
@@ -11370,7 +11366,7 @@ bool url::parse_ipv6(std::string_view input) {
ada_log("parse_ipv6 if ipv4Piece is 0, validation error");
return is_valid = false;
}
- // Otherwise, set ipv4Piece to ipv4Piece × 10 + number.
+ // Otherwise, set ipv4Piece to ipv4Piece times 10 + number.
else {
ipv4_piece = *ipv4_piece * 10 + number;
}
@@ -11385,7 +11381,8 @@ bool url::parse_ipv6(std::string_view input) {
pointer++;
}
- // Set address[pieceIndex] to address[pieceIndex] × 0x100 + ipv4Piece.
+ // Set address[pieceIndex] to address[pieceIndex] times 0x100 +
+ // ipv4Piece.
// https://stackoverflow.com/questions/39060852/why-does-the-addition-of-two-shorts-return-an-int
address[piece_index] =
uint16_t(address[piece_index] * 0x100 + *ipv4_piece);
@@ -11438,14 +11435,14 @@ bool url::parse_ipv6(std::string_view input) {
// If compress is non-null, then:
if (compress.has_value()) {
- // Let swaps be pieceIndex − compress.
+ // Let swaps be pieceIndex - compress.
int swaps = piece_index - *compress;
// Set pieceIndex to 7.
piece_index = 7;
// While pieceIndex is not 0 and swaps is greater than 0,
- // swap address[pieceIndex] with address[compress + swaps − 1], and then
+ // swap address[pieceIndex] with address[compress + swaps - 1], and then
// decrease both pieceIndex and swaps by 1.
while (piece_index != 0 && swaps > 0) {
std::swap(address[piece_index], address[*compress + swaps - 1]);
@@ -11476,7 +11473,7 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
**/
if (is_input_special) { // fast path!!!
if (has_state_override) {
- // If url’s scheme is not a special scheme and buffer is a special scheme,
+ // If url's scheme is not a special scheme and buffer is a special scheme,
// then return.
if (is_special() != is_input_special) {
return true;
@@ -11489,7 +11486,7 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
return true;
}
- // If url’s scheme is "file" and its host is an empty host, then return.
+ // If url's scheme is "file" and its host is an empty host, then return.
// An empty host is the empty string.
if (type == ada::scheme::type::FILE && host.has_value() &&
host.value().empty()) {
@@ -11504,7 +11501,7 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
uint16_t urls_scheme_port = get_special_port();
if (urls_scheme_port) {
- // If url’s port is url’s scheme’s default port, then set url’s port to
+ // If url's port is url's scheme's default port, then set url's port to
// null.
if (port.has_value() && *port == urls_scheme_port) {
port = std::nullopt;
@@ -11520,8 +11517,8 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
unicode::to_lower_ascii(_buffer.data(), _buffer.size());
if (has_state_override) {
- // If url’s scheme is a special scheme and buffer is not a special scheme,
- // then return. If url’s scheme is not a special scheme and buffer is a
+ // If url's scheme is a special scheme and buffer is not a special scheme,
+ // then return. If url's scheme is not a special scheme and buffer is a
// special scheme, then return.
if (is_special() != ada::scheme::is_special(_buffer)) {
return true;
@@ -11533,7 +11530,7 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
return true;
}
- // If url’s scheme is "file" and its host is an empty host, then return.
+ // If url's scheme is "file" and its host is an empty host, then return.
// An empty host is the empty string.
if (type == ada::scheme::type::FILE && host.has_value() &&
host.value().empty()) {
@@ -11548,7 +11545,7 @@ ada_really_inline bool url::parse_scheme(const std::string_view input) {
uint16_t urls_scheme_port = get_special_port();
if (urls_scheme_port) {
- // If url’s port is url’s scheme’s default port, then set url’s port to
+ // If url's port is url's scheme's default port, then set url's port to
// null.
if (port.has_value() && *port == urls_scheme_port) {
port = std::nullopt;
@@ -11753,7 +11750,7 @@ namespace ada {
auto result = ada::parse(path);
if (result &&
(result->type == scheme::HTTP || result->type == scheme::HTTPS)) {
- // If pathURL’s scheme is not "http" and not "https", then return a
+ // If pathURL's scheme is not "http" and not "https", then return a
// new opaque origin.
return ada::helpers::concat(result->get_protocol(), "//",
result->get_host());
@@ -11774,9 +11771,9 @@ namespace ada {
}
[[nodiscard]] std::string url::get_host() const noexcept {
- // If url’s host is null, then return the empty string.
- // If url’s port is null, return url’s host, serialized.
- // Return url’s host, serialized, followed by U+003A (:) and url’s port,
+ // If url's host is null, then return the empty string.
+ // If url's port is null, return url's host, serialized.
+ // Return url's host, serialized, followed by U+003A (:) and url's port,
// serialized.
if (!host.has_value()) {
return "";
@@ -11796,8 +11793,8 @@ namespace ada {
}
[[nodiscard]] std::string url::get_search() const noexcept {
- // If this’s URL’s query is either null or the empty string, then return the
- // empty string. Return U+003F (?), followed by this’s URL’s query.
+ // If this's URL's query is either null or the empty string, then return the
+ // empty string. Return U+003F (?), followed by this's URL's query.
return (!query.has_value() || (query.value().empty())) ? ""
: "?" + query.value();
}
@@ -11815,8 +11812,8 @@ namespace ada {
}
[[nodiscard]] std::string url::get_hash() const noexcept {
- // If this’s URL’s fragment is either null or the empty string, then return
- // the empty string. Return U+0023 (#), followed by this’s URL’s fragment.
+ // If this's URL's fragment is either null or the empty string, then return
+ // the empty string. Return U+0023 (#), followed by this's URL's fragment.
return (!hash.has_value() || (hash.value().empty())) ? ""
: "#" + hash.value();
}
@@ -11871,7 +11868,7 @@ bool url::set_host_or_hostname(const std::string_view input) {
}
// If url is special and host_view is the empty string, validation error,
// return failure. Otherwise, if state override is given, host_view is the
- // empty string, and either url includes credentials or url’s port is
+ // empty string, and either url includes credentials or url's port is
// non-null, return.
else if (host_view.empty() &&
(is_special() || has_credentials() || port.has_value())) {
@@ -11898,7 +11895,7 @@ bool url::set_host_or_hostname(const std::string_view input) {
}
if (new_host.empty()) {
- // Set url’s host to the empty string.
+ // Set url's host to the empty string.
host = "";
} else {
// Let host be the result of host parsing buffer with url is not special.
@@ -12108,7 +12105,7 @@ result_type parse_url(std::string_view user_input,
// Most of the time, we just need user_input.size().
// In some instances, we may need a bit more.
///////////////////////////
- // This is *very* important. This line should be removed
+ // This is *very* important. This line should *not* be removed
// hastily. There are principled reasons why reserve is important
// for performance. If you have a benchmark with small inputs,
// it may not matter, but in other instances, it could.
@@ -12203,13 +12200,13 @@ result_type parse_url(std::string_view user_input,
}
ada_log("SCHEME the scheme is ", url.get_protocol());
- // If url’s scheme is "file", then:
+ // If url's scheme is "file", then:
if (url.type == ada::scheme::type::FILE) {
// Set state to file state.
state = ada::state::FILE;
}
- // Otherwise, if url is special, base is non-null, and base’s scheme
- // is url’s scheme: Note: Doing base_url->scheme is unsafe if base_url
+ // Otherwise, if url is special, base is non-null, and base's scheme
+ // is url's scheme: Note: Doing base_url->scheme is unsafe if base_url
// != nullptr is false.
else if (url.is_special() && base_url != nullptr &&
base_url->type == url.type) {
@@ -12228,7 +12225,7 @@ result_type parse_url(std::string_view user_input,
state = ada::state::PATH_OR_AUTHORITY;
input_position++;
}
- // Otherwise, set url’s path to the empty string and set state to
+ // Otherwise, set url's path to the empty string and set state to
// opaque path state.
else {
state = ada::state::OPAQUE_PATH;
@@ -12256,8 +12253,8 @@ result_type parse_url(std::string_view user_input,
return url;
}
// Otherwise, if base has an opaque path and c is U+0023 (#),
- // set url’s scheme to base’s scheme, url’s path to base’s path, url’s
- // query to base’s query, and set state to fragment state.
+ // set url's scheme to base's scheme, url's path to base's path, url's
+ // query to base's query, and set state to fragment state.
else if (base_url->has_opaque_path && fragment.has_value() &&
input_position == input_size) {
ada_log("NO_SCHEME opaque base with fragment");
@@ -12274,7 +12271,7 @@ result_type parse_url(std::string_view user_input,
url.update_unencoded_base_hash(*fragment);
return url;
}
- // Otherwise, if base’s scheme is not "file", set state to relative
+ // Otherwise, if base's scheme is not "file", set state to relative
// state and decrease pointer by 1.
else if (base_url->type != ada::scheme::type::FILE) {
ada_log("NO_SCHEME non-file relative path");
@@ -12447,7 +12444,7 @@ result_type parse_url(std::string_view user_input,
ada_log("RELATIVE_SCHEME ",
helpers::substring(url_data, input_position));
- // Set url’s scheme to base’s scheme.
+ // Set url's scheme to base's scheme.
url.copy_scheme(*base_url);
// If c is U+002F (/), then set state to relative slash state.
@@ -12467,9 +12464,9 @@ result_type parse_url(std::string_view user_input,
state = ada::state::RELATIVE_SLASH;
} else {
ada_log("RELATIVE_SCHEME otherwise");
- // Set url’s username to base’s username, url’s password to base’s
- // password, url’s host to base’s host, url’s port to base’s port,
- // url’s path to a clone of base’s path, and url’s query to base’s
+ // Set url's username to base's username, url's password to base's
+ // password, url's host to base's host, url's port to base's port,
+ // url's path to a clone of base's path, and url's query to base's
// query.
if constexpr (result_type_is_ada_url) {
url.username = base_url->username;
@@ -12495,7 +12492,7 @@ result_type parse_url(std::string_view user_input,
url.has_opaque_path = base_url->has_opaque_path;
- // If c is U+003F (?), then set url’s query to the empty string, and
+ // If c is U+003F (?), then set url's query to the empty string, and
// state to query state.
if ((input_position != input_size) &&
(url_data[input_position] == '?')) {
@@ -12503,10 +12500,10 @@ result_type parse_url(std::string_view user_input,
}
// Otherwise, if c is not the EOF code point:
else if (input_position != input_size) {
- // Set url’s query to null.
+ // Set url's query to null.
url.clear_search();
if constexpr (result_type_is_ada_url) {
- // Shorten url’s path.
+ // Shorten url's path.
helpers::shorten_path(url.path, url.type);
} else {
std::string_view path = url.get_pathname();
@@ -12539,10 +12536,10 @@ result_type parse_url(std::string_view user_input,
state = ada::state::AUTHORITY;
}
// Otherwise, set
- // - url’s username to base’s username,
- // - url’s password to base’s password,
- // - url’s host to base’s host,
- // - url’s port to base’s port,
+ // - url's username to base's username,
+ // - url's password to base's password,
+ // - url's host to base's host,
+ // - url's port to base's port,
// - state to path state, and then, decrease pointer by 1.
else {
if constexpr (result_type_is_ada_url) {
@@ -12604,7 +12601,7 @@ result_type parse_url(std::string_view user_input,
: ada::character_sets::QUERY_PERCENT_ENCODE;
// Percent-encode after encoding, with encoding, buffer, and
- // queryPercentEncodeSet, and append the result to url’s query.
+ // queryPercentEncodeSet, and append the result to url's query.
url.update_base_search(helpers::substring(url_data, input_position),
query_percent_encode_set);
ada_log("QUERY update_base_search completed ");
@@ -12635,7 +12632,7 @@ result_type parse_url(std::string_view user_input,
return url;
}
ada_log("HOST parsing results in ", url.get_hostname());
- // Set url’s host to host, buffer to the empty string, and state to
+ // Set url's host to host, buffer to the empty string, and state to
// port state.
state = ada::state::PORT;
input_position++;
@@ -12663,7 +12660,7 @@ result_type parse_url(std::string_view user_input,
ada_log("HOST parsing results in ", url.get_hostname(),
" href=", url.get_href());
- // Set url’s host to host, and state to path start state.
+ // Set url's host to host, and state to path start state.
state = ada::state::PATH_START;
}
@@ -12672,7 +12669,7 @@ result_type parse_url(std::string_view user_input,
case ada::state::OPAQUE_PATH: {
ada_log("OPAQUE_PATH ", helpers::substring(url_data, input_position));
std::string_view view = helpers::substring(url_data, input_position);
- // If c is U+003F (?), then set url’s query to the empty string and
+ // If c is U+003F (?), then set url's query to the empty string and
// state to query state.
size_t location = view.find('?');
if (location != std::string_view::npos) {
@@ -12727,7 +12724,7 @@ result_type parse_url(std::string_view user_input,
}
}
// Otherwise, if state override is not given and c is U+003F (?),
- // set url’s query to the empty string and state to query state.
+ // set url's query to the empty string and state to query state.
else if ((input_position != input_size) &&
(url_data[input_position] == '?')) {
state = ada::state::QUERY;
@@ -12781,12 +12778,12 @@ result_type parse_url(std::string_view user_input,
input_position++;
} else {
ada_log("FILE_SLASH otherwise");
- // If base is non-null and base’s scheme is "file", then:
+ // If base is non-null and base's scheme is "file", then:
// Note: it is unsafe to do base_url->scheme unless you know that
// base_url_has_value() is true.
if (base_url != nullptr &&
base_url->type == ada::scheme::type::FILE) {
- // Set url’s host to base’s host.
+ // Set url's host to base's host.
if constexpr (result_type_is_ada_url) {
url.host = base_url->host;
} else {
@@ -12794,9 +12791,9 @@ result_type parse_url(std::string_view user_input,
url.set_host(base_url->get_host());
}
// If the code point substring from pointer to the end of input does
- // not start with a Windows drive letter and base’s path[0] is a
- // normalized Windows drive letter, then append base’s path[0] to
- // url’s path.
+ // not start with a Windows drive letter and base's path[0] is a
+ // normalized Windows drive letter, then append base's path[0] to
+ // url's path.
if (!base_url->get_pathname().empty()) {
if (!checkers::is_windows_drive_letter(
helpers::substring(url_data, input_position))) {
@@ -12838,7 +12835,7 @@ result_type parse_url(std::string_view user_input,
if (checkers::is_windows_drive_letter(file_host_buffer)) {
state = ada::state::PATH;
} else if (file_host_buffer.empty()) {
- // Set url’s host to the empty string.
+ // Set url's host to the empty string.
if constexpr (result_type_is_ada_url) {
url.host = "";
} else {
@@ -12879,7 +12876,7 @@ result_type parse_url(std::string_view user_input,
url.set_protocol_as_file();
if constexpr (result_type_is_ada_url) {
- // Set url’s host to the empty string.
+ // Set url's host to the empty string.
url.host = "";
} else {
url.update_base_hostname("");
@@ -12892,11 +12889,11 @@ result_type parse_url(std::string_view user_input,
// Set state to file slash state.
state = ada::state::FILE_SLASH;
}
- // Otherwise, if base is non-null and base’s scheme is "file":
+ // Otherwise, if base is non-null and base's scheme is "file":
else if (base_url != nullptr &&
base_url->type == ada::scheme::type::FILE) {
- // Set url’s host to base’s host, url’s path to a clone of base’s
- // path, and url’s query to base’s query.
+ // Set url's host to base's host, url's path to a clone of base's
+ // path, and url's query to base's query.
ada_log("FILE base non-null");
if constexpr (result_type_is_ada_url) {
url.host = base_url->host;
@@ -12911,17 +12908,17 @@ result_type parse_url(std::string_view user_input,
}
url.has_opaque_path = base_url->has_opaque_path;
- // If c is U+003F (?), then set url’s query to the empty string and
+ // If c is U+003F (?), then set url's query to the empty string and
// state to query state.
if (input_position != input_size && url_data[input_position] == '?') {
state = ada::state::QUERY;
}
// Otherwise, if c is not the EOF code point:
else if (input_position != input_size) {
- // Set url’s query to null.
+ // Set url's query to null.
url.clear_search();
// If the code point substring from pointer to the end of input does
- // not start with a Windows drive letter, then shorten url’s path.
+ // not start with a Windows drive letter, then shorten url's path.
if (!checkers::is_windows_drive_letter(file_view)) {
if constexpr (result_type_is_ada_url) {
helpers::shorten_path(url.path, url.type);
@@ -12934,7 +12931,7 @@ result_type parse_url(std::string_view user_input,
}
// Otherwise:
else {
- // Set url’s path to an empty list.
+ // Set url's path to an empty list.
url.clear_pathname();
url.has_opaque_path = true;
}
@@ -13119,7 +13116,7 @@ template
**/
if (is_input_special) { // fast path!!!
if (has_state_override) {
- // If url’s scheme is not a special scheme and buffer is a special scheme,
+ // If url's scheme is not a special scheme and buffer is a special scheme,
// then return.
if (is_special() != is_input_special) {
return true;
@@ -13132,7 +13129,7 @@ template
return true;
}
- // If url’s scheme is "file" and its host is an empty host, then return.
+ // If url's scheme is "file" and its host is an empty host, then return.
// An empty host is the empty string.
if (type == ada::scheme::type::FILE &&
components.host_start == components.host_end) {
@@ -13147,7 +13144,7 @@ template
// This is uncommon.
uint16_t urls_scheme_port = get_special_port();
- // If url’s port is url’s scheme’s default port, then set url’s port to
+ // If url's port is url's scheme's default port, then set url's port to
// null.
if (components.port == urls_scheme_port) {
clear_port();
@@ -13161,8 +13158,8 @@ template
unicode::to_lower_ascii(_buffer.data(), _buffer.size());
if (has_state_override) {
- // If url’s scheme is a special scheme and buffer is not a special scheme,
- // then return. If url’s scheme is not a special scheme and buffer is a
+ // If url's scheme is a special scheme and buffer is not a special scheme,
+ // then return. If url's scheme is not a special scheme and buffer is a
// special scheme, then return.
if (is_special() != ada::scheme::is_special(_buffer)) {
return true;
@@ -13175,7 +13172,7 @@ template
return true;
}
- // If url’s scheme is "file" and its host is an empty host, then return.
+ // If url's scheme is "file" and its host is an empty host, then return.
// An empty host is the empty string.
if (type == ada::scheme::type::FILE &&
components.host_start == components.host_end) {
@@ -13189,7 +13186,7 @@ template
// This is uncommon.
uint16_t urls_scheme_port = get_special_port();
- // If url’s port is url’s scheme’s default port, then set url’s port to
+ // If url's port is url's scheme's default port, then set url's port to
// null.
if (components.port == urls_scheme_port) {
clear_port();
@@ -13638,7 +13635,7 @@ bool url_aggregator::set_host_or_hostname(const std::string_view input) {
}
// If url is special and host_view is the empty string, validation error,
// return failure. Otherwise, if state override is given, host_view is the
- // empty string, and either url includes credentials or url’s port is
+ // empty string, and either url includes credentials or url's port is
// non-null, return.
else if (host_view.empty() &&
(is_special() || has_credentials() ||
@@ -13674,7 +13671,7 @@ bool url_aggregator::set_host_or_hostname(const std::string_view input) {
}
if (new_host.empty()) {
- // Set url’s host to the empty string.
+ // Set url's host to the empty string.
clear_hostname();
} else {
// Let host be the result of host parsing buffer with url is not special.
@@ -13724,7 +13721,7 @@ bool url_aggregator::set_hostname(const std::string_view input) {
if (!path.empty()) {
     auto out = ada::parse<ada::url>(path);
if (out && (out->type == scheme::HTTP || out->type == scheme::HTTPS)) {
- // If pathURL’s scheme is not "http" and not "https", then return a
+ // If pathURL's scheme is not "http" and not "https", then return a
// new opaque origin.
return helpers::concat(out->get_protocol(), "//", out->get_host());
}
@@ -13764,8 +13761,8 @@ bool url_aggregator::set_hostname(const std::string_view input) {
[[nodiscard]] std::string_view url_aggregator::get_hash() const noexcept {
ada_log("url_aggregator::get_hash");
- // If this’s URL’s fragment is either null or the empty string, then return
- // the empty string. Return U+0023 (#), followed by this’s URL’s fragment.
+ // If this's URL's fragment is either null or the empty string, then return
+ // the empty string. Return U+0023 (#), followed by this's URL's fragment.
if (components.hash_start == url_components::omitted) {
return "";
}
@@ -13823,8 +13820,8 @@ bool url_aggregator::set_hostname(const std::string_view input) {
[[nodiscard]] std::string_view url_aggregator::get_search() const noexcept {
ada_log("url_aggregator::get_search");
- // If this’s URL’s query is either null or the empty string, then return the
- // empty string. Return U+003F (?), followed by this’s URL’s query.
+ // If this's URL's query is either null or the empty string, then return the
+ // empty string. Return U+003F (?), followed by this's URL's query.
if (components.search_start == url_components::omitted) {
return "";
}
@@ -14091,7 +14088,7 @@ bool url_aggregator::parse_ipv6(std::string_view input) {
uint16_t value = 0, length = 0;
// While length is less than 4 and c is an ASCII hex digit,
- // set value to value × 0x10 + c interpreted as hexadecimal number, and
+ // set value to value times 0x10 + c interpreted as hexadecimal number, and
// increase pointer and length by 1.
while (length < 4 && pointer != input.end() &&
unicode::is_ascii_hex_digit(*pointer)) {
@@ -14161,7 +14158,7 @@ bool url_aggregator::parse_ipv6(std::string_view input) {
ada_log("parse_ipv6 if ipv4Piece is 0, validation error");
return is_valid = false;
}
- // Otherwise, set ipv4Piece to ipv4Piece × 10 + number.
+ // Otherwise, set ipv4Piece to ipv4Piece times 10 + number.
else {
ipv4_piece = *ipv4_piece * 10 + number;
}
@@ -14176,7 +14173,8 @@ bool url_aggregator::parse_ipv6(std::string_view input) {
pointer++;
}
- // Set address[pieceIndex] to address[pieceIndex] × 0x100 + ipv4Piece.
+ // Set address[pieceIndex] to address[pieceIndex] times 0x100 +
+ // ipv4Piece.
// https://stackoverflow.com/questions/39060852/why-does-the-addition-of-two-shorts-return-an-int
address[piece_index] =
uint16_t(address[piece_index] * 0x100 + *ipv4_piece);
@@ -14229,14 +14227,14 @@ bool url_aggregator::parse_ipv6(std::string_view input) {
// If compress is non-null, then:
if (compress.has_value()) {
- // Let swaps be pieceIndex − compress.
+ // Let swaps be pieceIndex - compress.
int swaps = piece_index - *compress;
// Set pieceIndex to 7.
piece_index = 7;
// While pieceIndex is not 0 and swaps is greater than 0,
- // swap address[pieceIndex] with address[compress + swaps − 1], and then
+ // swap address[pieceIndex] with address[compress + swaps - 1], and then
// decrease both pieceIndex and swaps by 1.
while (piece_index != 0 && swaps > 0) {
std::swap(address[piece_index], address[*compress + swaps - 1]);
@@ -14777,7 +14775,7 @@ inline void url_aggregator::consume_prepared_path(std::string_view input) {
}
// Otherwise, if path_buffer is not a single-dot path segment, then:
else if (!unicode::is_single_dot_path_segment(path_buffer)) {
- // If url’s scheme is "file", url’s path is empty, and path_buffer is a
+ // If url's scheme is "file", url's path is empty, and path_buffer is a
// Windows drive letter, then replace the second code point in
// path_buffer with U+003A (:).
if (type == ada::scheme::type::FILE && path.empty() &&
@@ -14788,7 +14786,7 @@ inline void url_aggregator::consume_prepared_path(std::string_view input) {
path_buffer.remove_prefix(2);
path.append(path_buffer);
} else {
- // Append path_buffer to url’s path.
+ // Append path_buffer to url's path.
path += '/';
path.append(path_buffer);
}
@@ -15173,6 +15171,25 @@ const ada_url_components* ada_get_components(ada_url result) noexcept {
}
  return reinterpret_cast<const ada_url_components*>(&r->get_components());
}
+
+ada_owned_string ada_idna_to_unicode(const char* input, size_t length) {
+ std::string out = ada::idna::to_unicode(std::string_view(input, length));
+ ada_owned_string owned{};
+ owned.length = out.length();
+ owned.data = new char[owned.length];
+ memcpy((void*)owned.data, out.data(), owned.length);
+ return owned;
+}
+
+ada_owned_string ada_idna_to_ascii(const char* input, size_t length) {
+ std::string out = ada::idna::to_ascii(std::string_view(input, length));
+ ada_owned_string owned{};
+ owned.length = out.size();
+ owned.data = new char[owned.length];
+ memcpy((void*)owned.data, out.data(), owned.length);
+ return owned;
+}
+
} // extern "C"
/* end file src/ada_c.cpp */
/* end file src/ada.cpp */
diff --git a/deps/ada/ada.h b/deps/ada/ada.h
index 4846b3172e6c64..3f1531944e96e7 100644
--- a/deps/ada/ada.h
+++ b/deps/ada/ada.h
@@ -1,4 +1,4 @@
-/* auto-generated on 2023-06-03 12:40:57 -0400. Do not edit! */
+/* auto-generated on 2023-07-23 15:03:22 -0400. Do not edit! */
/* begin file include/ada.h */
/**
* @file ada.h
@@ -120,7 +120,6 @@ namespace ada::idna {
// this function. We also do not trim control characters. We also assume that
// the input is not empty. We return "" on error.
//
-// Example: "www.öbb.at" -> "www.xn--bb-eka.at"
//
// This function may accept or even produce invalid domains.
std::string to_ascii(std::string_view ut8_string);
@@ -926,6 +925,72 @@ constexpr uint8_t PATH_PERCENT_ENCODE[32] = {
// F8 F9 FA FB FC FD FE FF
0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80};
+constexpr uint8_t WWW_FORM_URLENCODED_PERCENT_ENCODE[32] = {
+ // 00 01 02 03 04 05 06 07
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 08 09 0A 0B 0C 0D 0E 0F
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 10 11 12 13 14 15 16 17
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 18 19 1A 1B 1C 1D 1E 1F
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 20 21 22 23 24 25 26 27
+ 0x00 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 28 29 2A 2B 2C 2D 2E 2F
+ 0x01 | 0x02 | 0x00 | 0x08 | 0x10 | 0x00 | 0x00 | 0x00,
+ // 30 31 32 33 34 35 36 37
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 38 39 3A 3B 3C 3D 3E 3F
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x10 | 0x00 | 0x40 | 0x80,
+ // 40 41 42 43 44 45 46 47
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 48 49 4A 4B 4C 4D 4E 4F
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 50 51 52 53 54 55 56 57
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 58 59 5A 5B 5C 5D 5E 5F
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 60 61 62 63 64 65 66 67
+ 0x01 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 68 69 6A 6B 6C 6D 6E 6F
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 70 71 72 73 74 75 76 77
+ 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00 | 0x00,
+ // 78 79 7A 7B 7C 7D 7E 7F
+ 0x00 | 0x00 | 0x00 | 0x08 | 0x00 | 0x20 | 0x40 | 0x80,
+ // 80 81 82 83 84 85 86 87
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 88 89 8A 8B 8C 8D 8E 8F
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 90 91 92 93 94 95 96 97
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // 98 99 9A 9B 9C 9D 9E 9F
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // A0 A1 A2 A3 A4 A5 A6 A7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // A8 A9 AA AB AC AD AE AF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // B0 B1 B2 B3 B4 B5 B6 B7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // B8 B9 BA BB BC BD BE BF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // C0 C1 C2 C3 C4 C5 C6 C7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // C8 C9 CA CB CC CD CE CF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // D0 D1 D2 D3 D4 D5 D6 D7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // D8 D9 DA DB DC DD DE DF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // E0 E1 E2 E3 E4 E5 E6 E7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // E8 E9 EA EB EC ED EE EF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // F0 F1 F2 F3 F4 F5 F6 F7
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80,
+ // F8 F9 FA FB FC FD FE FF
+ 0x01 | 0x02 | 0x04 | 0x08 | 0x10 | 0x20 | 0x40 | 0x80};
+
ada_really_inline bool bit_at(const uint8_t a[], const uint8_t i) {
return !!(a[i >> 3] & (1 << (i & 7)));
}
@@ -1375,7 +1440,7 @@ struct url_base {
[[nodiscard]] ada_really_inline bool is_special() const noexcept;
/**
- * The origin getter steps are to return the serialization of this’s URL’s
+ * The origin getter steps are to return the serialization of this's URL's
* origin. [HTML]
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#concept-url-origin
@@ -4284,10 +4349,10 @@ namespace ada::unicode {
* Given a domain, we need to identify its labels.
* They are separated by label-separators:
*
- * U+002E ( . ) FULL STOP
- * U+FF0E ( . ) FULLWIDTH FULL STOP
- * U+3002 ( 。 ) IDEOGRAPHIC FULL STOP
- * U+FF61 ( 。 ) HALFWIDTH IDEOGRAPHIC FULL STOP
+ * U+002E (.) FULL STOP
+ * U+FF0E FULLWIDTH FULL STOP
+ * U+3002 IDEOGRAPHIC FULL STOP
+ * U+FF61 HALFWIDTH IDEOGRAPHIC FULL STOP
*
* They are all mapped to U+002E.
*
@@ -4517,7 +4582,7 @@ struct url_aggregator : url_base {
[[nodiscard]] bool has_valid_domain() const noexcept override;
/**
- * The origin getter steps are to return the serialization of this’s URL’s
+ * The origin getter steps are to return the serialization of this's URL's
* origin. [HTML]
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#concept-url-origin
@@ -4533,35 +4598,35 @@ struct url_aggregator : url_base {
*/
inline std::string_view get_href() const noexcept;
/**
- * The username getter steps are to return this’s URL’s username.
+ * The username getter steps are to return this's URL's username.
* This function does not allocate memory.
* @return a lightweight std::string_view.
* @see https://url.spec.whatwg.org/#dom-url-username
*/
[[nodiscard]] std::string_view get_username() const noexcept;
/**
- * The password getter steps are to return this’s URL’s password.
+ * The password getter steps are to return this's URL's password.
* This function does not allocate memory.
* @return a lightweight std::string_view.
* @see https://url.spec.whatwg.org/#dom-url-password
*/
[[nodiscard]] std::string_view get_password() const noexcept;
/**
- * Return this’s URL’s port, serialized.
+ * Return this's URL's port, serialized.
* This function does not allocate memory.
* @return a lightweight std::string_view.
* @see https://url.spec.whatwg.org/#dom-url-port
*/
[[nodiscard]] std::string_view get_port() const noexcept;
/**
- * Return U+0023 (#), followed by this’s URL’s fragment.
+ * Return U+0023 (#), followed by this's URL's fragment.
* This function does not allocate memory.
* @return a lightweight std::string_view..
* @see https://url.spec.whatwg.org/#dom-url-hash
*/
[[nodiscard]] std::string_view get_hash() const noexcept;
/**
- * Return url’s host, serialized, followed by U+003A (:) and url’s port,
+ * Return url's host, serialized, followed by U+003A (:) and url's port,
* serialized.
* This function does not allocate memory.
* When there is no host, this function returns the empty view.
@@ -4570,7 +4635,7 @@ struct url_aggregator : url_base {
*/
[[nodiscard]] std::string_view get_host() const noexcept;
/**
- * Return this’s URL’s host, serialized.
+ * Return this's URL's host, serialized.
* This function does not allocate memory.
* When there is no host, this function returns the empty view.
* @return a lightweight std::string_view.
@@ -4579,7 +4644,7 @@ struct url_aggregator : url_base {
[[nodiscard]] std::string_view get_hostname() const noexcept;
/**
* The pathname getter steps are to return the result of URL path serializing
- * this’s URL.
+ * this's URL.
* This function does not allocate memory.
* @return a lightweight std::string_view.
* @see https://url.spec.whatwg.org/#dom-url-pathname
@@ -4593,14 +4658,14 @@ struct url_aggregator : url_base {
*/
ada_really_inline uint32_t get_pathname_length() const noexcept;
/**
- * Return U+003F (?), followed by this’s URL’s query.
+ * Return U+003F (?), followed by this's URL's query.
* This function does not allocate memory.
* @return a lightweight std::string_view.
* @see https://url.spec.whatwg.org/#dom-url-search
*/
[[nodiscard]] std::string_view get_search() const noexcept;
/**
- * The protocol getter steps are to return this’s URL’s scheme, followed by
+ * The protocol getter steps are to return this's URL's scheme, followed by
* U+003A (:).
* This function does not allocate memory.
* @return a lightweight std::string_view.
@@ -4924,48 +4989,48 @@ struct url : url_base {
/**
* @private
- * A URL’s username is an ASCII string identifying a username. It is initially
+ * A URL's username is an ASCII string identifying a username. It is initially
* the empty string.
*/
std::string username{};
/**
* @private
- * A URL’s password is an ASCII string identifying a password. It is initially
+ * A URL's password is an ASCII string identifying a password. It is initially
* the empty string.
*/
std::string password{};
/**
* @private
- * A URL’s host is null or a host. It is initially null.
+ * A URL's host is null or a host. It is initially null.
*/
  std::optional<std::string> host{};
/**
* @private
- * A URL’s port is either null or a 16-bit unsigned integer that identifies a
+ * A URL's port is either null or a 16-bit unsigned integer that identifies a
* networking port. It is initially null.
*/
  std::optional<uint16_t> port{};
/**
* @private
- * A URL’s path is either an ASCII string or a list of zero or more ASCII
+ * A URL's path is either an ASCII string or a list of zero or more ASCII
* strings, usually identifying a location.
*/
std::string path{};
/**
* @private
- * A URL’s query is either null or an ASCII string. It is initially null.
+ * A URL's query is either null or an ASCII string. It is initially null.
*/
  std::optional<std::string> query{};
/**
* @private
- * A URL’s fragment is either null or an ASCII string that can be used for
- * further processing on the resource the URL’s other components identify. It
+ * A URL's fragment is either null or an ASCII string that can be used for
+ * further processing on the resource the URL's other components identify. It
* is initially null.
*/
  std::optional<std::string> hash{};
@@ -4990,7 +5055,7 @@ struct url : url_base {
[[nodiscard]] ada_really_inline std::string get_href() const noexcept;
/**
- * The origin getter steps are to return the serialization of this’s URL’s
+ * The origin getter steps are to return the serialization of this's URL's
* origin. [HTML]
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#concept-url-origin
@@ -4998,7 +5063,7 @@ struct url : url_base {
[[nodiscard]] std::string get_origin() const noexcept override;
/**
- * The protocol getter steps are to return this’s URL’s scheme, followed by
+ * The protocol getter steps are to return this's URL's scheme, followed by
* U+003A (:).
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#dom-url-protocol
@@ -5006,7 +5071,7 @@ struct url : url_base {
[[nodiscard]] std::string get_protocol() const noexcept;
/**
- * Return url’s host, serialized, followed by U+003A (:) and url’s port,
+ * Return url's host, serialized, followed by U+003A (:) and url's port,
* serialized.
* When there is no host, this function returns the empty string.
* @return a newly allocated string.
@@ -5015,7 +5080,7 @@ struct url : url_base {
[[nodiscard]] std::string get_host() const noexcept;
/**
- * Return this’s URL’s host, serialized.
+ * Return this's URL's host, serialized.
* When there is no host, this function returns the empty string.
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#dom-url-hostname
@@ -5024,7 +5089,7 @@ struct url : url_base {
/**
* The pathname getter steps are to return the result of URL path serializing
- * this’s URL.
+ * this's URL.
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#dom-url-pathname
*/
@@ -5039,14 +5104,14 @@ struct url : url_base {
ada_really_inline size_t get_pathname_length() const noexcept;
/**
- * Return U+003F (?), followed by this’s URL’s query.
+ * Return U+003F (?), followed by this's URL's query.
* @return a newly allocated string.
* @see https://url.spec.whatwg.org/#dom-url-search
*/
[[nodiscard]] std::string get_search() const noexcept;
/**
- * The username getter steps are to return this’s URL’s username.
+ * The username getter steps are to return this's URL's username.
* @return a constant reference to the underlying string.
* @see https://url.spec.whatwg.org/#dom-url-username
*/
@@ -5112,21 +5177,21 @@ struct url : url_base {
bool set_href(const std::string_view input);
/**
- * The password getter steps are to return this’s URL’s password.
+ * The password getter steps are to return this's URL's password.
* @return a constant reference to the underlying string.
* @see https://url.spec.whatwg.org/#dom-url-password
*/
[[nodiscard]] const std::string &get_password() const noexcept;
/**
- * Return this’s URL’s port, serialized.
+ * Return this's URL's port, serialized.
* @return a newly constructed string representing the port.
* @see https://url.spec.whatwg.org/#dom-url-port
*/
[[nodiscard]] std::string get_port() const noexcept;
/**
- * Return U+0023 (#), followed by this’s URL’s fragment.
+ * Return U+0023 (#), followed by this's URL's fragment.
* @return a newly constructed string representing the hash.
* @see https://url.spec.whatwg.org/#dom-url-hash
*/
@@ -5212,7 +5277,7 @@ struct url : url_base {
[[nodiscard]] bool parse_opaque_host(std::string_view input);
/**
- * A URL’s scheme is an ASCII string that identifies the type of URL and can
+ * A URL's scheme is an ASCII string that identifies the type of URL and can
* be used to dispatch a URL for further processing after parsing. It is
* initially the empty string. We only set non_special_scheme when the scheme
* is non-special, otherwise we avoid constructing string.
@@ -5394,8 +5459,8 @@ size_t url::get_pathname_length() const noexcept { return path.size(); }
out.host_end = out.host_start;
if (!has_opaque_path && checkers::begins_with(path, "//")) {
- // If url’s host is null, url does not have an opaque path, url’s path’s
- // size is greater than 1, and url’s path[0] is the empty string, then
+ // If url's host is null, url does not have an opaque path, url's path's
+ // size is greater than 1, and url's path[0] is the empty string, then
// append U+002F (/) followed by U+002E (.) to output.
running_index = out.protocol_end + 2;
} else {
@@ -5509,8 +5574,8 @@ inline void url::copy_scheme(const ada::url &u) {
output += ":" + get_port();
}
} else if (!has_opaque_path && checkers::begins_with(path, "//")) {
- // If url’s host is null, url does not have an opaque path, url’s path’s
- // size is greater than 1, and url’s path[0] is the empty string, then
+ // If url's host is null, url does not have an opaque path, url's path's
+ // size is greater than 1, and url's path[0] is the empty string, then
// append U+002F (/) followed by U+002E (.) to output.
output += "/.";
}
@@ -5854,7 +5919,7 @@ inline void url_aggregator::update_base_pathname(const std::string_view input) {
if (begins_with_dashdash && !has_opaque_path && !has_authority() &&
!has_dash_dot()) {
- // If url’s host is null, url does not have an opaque path, url’s path’s
+ // If url's host is null, url does not have an opaque path, url's path's
// size is greater than 1, then append U+002F (/) followed by U+002E (.) to
// output.
buffer.insert(components.pathname_start, "/.");
@@ -6387,8 +6452,8 @@ inline bool url_aggregator::has_port() const noexcept {
}
inline bool url_aggregator::has_dash_dot() const noexcept {
- // If url’s host is null, url does not have an opaque path, url’s path’s size
- // is greater than 1, and url’s path[0] is the empty string, then append
+ // If url's host is null, url does not have an opaque path, url's path's size
+ // is greater than 1, and url's path[0] is the empty string, then append
// U+002F (/) followed by U+002E (.) to output.
ada_log("url_aggregator::has_dash_dot");
// Performance: instead of doing this potentially expensive check, we could
@@ -6484,6 +6549,254 @@ inline std::ostream &operator<<(std::ostream &out,
#endif // ADA_URL_AGGREGATOR_INL_H
/* end file include/ada/url_aggregator-inl.h */
+/* begin file include/ada/url_search_params.h */
+/**
+ * @file url_search_params.h
+ * @brief Declaration for the URL Search Params
+ */
+#ifndef ADA_URL_SEARCH_PARAMS_H
+#define ADA_URL_SEARCH_PARAMS_H
+
+#include <optional>
+#include <string>
+#include <string_view>
+#include <vector>
+
+namespace ada {
+
+/**
+ * @see https://url.spec.whatwg.org/#interface-urlsearchparams
+ */
+struct url_search_params {
+ url_search_params() = default;
+
+ /**
+ * @see
+ * https://github.com/web-platform-tests/wpt/blob/master/url/urlsearchparams-constructor.any.js
+ */
+ url_search_params(const std::string_view input) { initialize(input); }
+
+ url_search_params(const url_search_params &u) = default;
+ url_search_params(url_search_params &&u) noexcept = default;
+ url_search_params &operator=(url_search_params &&u) noexcept = default;
+ url_search_params &operator=(const url_search_params &u) = default;
+ ~url_search_params() = default;
+
+ [[nodiscard]] inline size_t size() const noexcept;
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-append
+ */
+ inline void append(std::string_view key, std::string_view value);
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-delete
+ */
+ inline void remove(std::string_view key);
+ inline void remove(std::string_view key, std::string_view value);
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-get
+ */
+  inline std::optional<std::string> get(std::string_view key);
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-getall
+ */
+  inline std::vector<std::string> get_all(std::string_view key);
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-has
+ */
+ inline bool has(std::string_view key) noexcept;
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-set
+ */
+ inline void set(std::string_view key, std::string_view value);
+
+ /**
+ * @see https://url.spec.whatwg.org/#dom-urlsearchparams-sort
+ */
+ inline void sort();
+
+ /**
+ * @see https://url.spec.whatwg.org/#urlsearchparams-stringification-behavior
+ */
+ inline std::string to_string();
+
+ private:
+  typedef std::pair<std::string, std::string> key_value_pair;
+  std::vector<key_value_pair> params{};
+
+ /**
+ * @see https://url.spec.whatwg.org/#concept-urlencoded-parser
+ */
+ void initialize(std::string_view init);
+}; // url_search_params
+
+} // namespace ada
+#endif
+/* end file include/ada/url_search_params.h */
+/* begin file include/ada/url_search_params-inl.h */
+/**
+ * @file url_search_params-inl.h
+ * @brief Inline declarations for the URL Search Params
+ */
+#ifndef ADA_URL_SEARCH_PARAMS_INL_H
+#define ADA_URL_SEARCH_PARAMS_INL_H
+
+
+#include <algorithm>
+#include <optional>
+#include <string>
+#include <string_view>
+#include <vector>
+
+namespace ada {
+
+inline void url_search_params::initialize(std::string_view input) {
+ if (!input.empty() && input.front() == '?') {
+ input.remove_prefix(1);
+ }
+
+ auto process_key_value = [&](const std::string_view current) {
+ auto equal = current.find('=');
+
+ if (equal == std::string_view::npos) {
+ auto name = std::string(current);
+ std::replace(name.begin(), name.end(), '+', ' ');
+ params.emplace_back(unicode::percent_decode(name, name.find('%')), "");
+ } else {
+ auto name = std::string(current.substr(0, equal));
+ auto value = std::string(current.substr(equal + 1));
+
+ std::replace(name.begin(), name.end(), '+', ' ');
+ std::replace(value.begin(), value.end(), '+', ' ');
+
+ params.emplace_back(unicode::percent_decode(name, name.find('%')),
+ unicode::percent_decode(value, value.find('%')));
+ }
+ };
+
+ while (!input.empty()) {
+ auto ampersand_index = input.find('&');
+
+ if (ampersand_index == std::string_view::npos) {
+ if (!input.empty()) {
+ process_key_value(input);
+ }
+ break;
+ } else if (ampersand_index != 0) {
+ process_key_value(input.substr(0, ampersand_index));
+ }
+
+ input.remove_prefix(ampersand_index + 1);
+ }
+}
+
+inline void url_search_params::append(const std::string_view key,
+ const std::string_view value) {
+ params.emplace_back(key, value);
+}
+
+inline size_t url_search_params::size() const noexcept { return params.size(); }
+
+inline std::optional<std::string> url_search_params::get(
+ const std::string_view key) {
+ auto entry = std::find_if(params.begin(), params.end(),
+                            [&key](auto &param) { return param.first == key; });
+
+ if (entry == params.end()) {
+ return std::nullopt;
+ }
+
+ return entry->second;
+}
+
+inline std::vector<std::string> url_search_params::get_all(
+ const std::string_view key) {
+  std::vector<std::string> out{};
+
+  for (auto &param : params) {
+ if (param.first == key) {
+ out.emplace_back(param.second);
+ }
+ }
+
+ return out;
+}
+
+inline bool url_search_params::has(const std::string_view key) noexcept {
+ auto entry = std::find_if(params.begin(), params.end(),
+                            [&key](auto &param) { return param.first == key; });
+ return entry != params.end();
+}
+
+inline std::string url_search_params::to_string() {
+ auto character_set = ada::character_sets::WWW_FORM_URLENCODED_PERCENT_ENCODE;
+ std::string out{};
+ for (size_t i = 0; i < params.size(); i++) {
+ auto key = ada::unicode::percent_encode(params[i].first, character_set);
+ auto value = ada::unicode::percent_encode(params[i].second, character_set);
+
+ // Performance optimization: Move this inside percent_encode.
+ std::replace(key.begin(), key.end(), ' ', '+');
+ std::replace(value.begin(), value.end(), ' ', '+');
+
+ if (i != 0) {
+ out += "&";
+ }
+ out.append(key);
+ out += "=";
+ out.append(value);
+ }
+ return out;
+}
+
+inline void url_search_params::set(const std::string_view key,
+ const std::string_view value) {
+  const auto find = [&key](auto &param) { return param.first == key; };
+
+ auto it = std::find_if(params.begin(), params.end(), find);
+
+ if (it == params.end()) {
+ params.emplace_back(key, value);
+ } else {
+ it->second = value;
+ params.erase(std::remove_if(std::next(it), params.end(), find),
+ params.end());
+ }
+}
+
+inline void url_search_params::remove(const std::string_view key) {
+ params.erase(
+ std::remove_if(params.begin(), params.end(),
+                            [&key](auto &param) { return param.first == key; }),
+ params.end());
+}
+
+inline void url_search_params::remove(const std::string_view key,
+ const std::string_view value) {
+ params.erase(std::remove_if(params.begin(), params.end(),
+                                     [&key, &value](auto &param) {
+ return param.first == key &&
+ param.second == value;
+ }),
+ params.end());
+}
+
+inline void url_search_params::sort() {
+ std::stable_sort(params.begin(), params.end(),
+ [](const key_value_pair &lhs, const key_value_pair &rhs) {
+ return lhs.first < rhs.first;
+ });
+}
+
+} // namespace ada
+
+#endif // ADA_URL_SEARCH_PARAMS_INL_H
+/* end file include/ada/url_search_params-inl.h */
// Public API
/* begin file include/ada/ada_version.h */
@@ -6494,14 +6807,14 @@ inline std::ostream &operator<<(std::ostream &out,
#ifndef ADA_ADA_VERSION_H
#define ADA_ADA_VERSION_H
-#define ADA_VERSION "2.5.1"
+#define ADA_VERSION "2.6.0"
namespace ada {
enum {
ADA_VERSION_MAJOR = 2,
- ADA_VERSION_MINOR = 5,
- ADA_VERSION_REVISION = 1,
+ ADA_VERSION_MINOR = 6,
+ ADA_VERSION_REVISION = 0,
};
} // namespace ada
diff --git a/deps/ada/ada_c.h b/deps/ada/ada_c.h
index f8bcbdcd14d161..6e22584f612a75 100644
--- a/deps/ada/ada_c.h
+++ b/deps/ada/ada_c.h
@@ -97,4 +97,8 @@ bool ada_has_search(ada_url result);
// returns a pointer to the internal url_aggregator::url_components
const ada_url_components* ada_get_components(ada_url result);
+// idna methods
+ada_owned_string ada_idna_to_unicode(const char* input, size_t length);
+ada_owned_string ada_idna_to_ascii(const char* input, size_t length);
+
#endif // ADA_C_H
diff --git a/doc/contributing/maintaining/maintaining-dependencies.md b/doc/contributing/maintaining/maintaining-dependencies.md
index 8e462e995b17f0..a55df291dcfd55 100644
--- a/doc/contributing/maintaining/maintaining-dependencies.md
+++ b/doc/contributing/maintaining/maintaining-dependencies.md
@@ -9,7 +9,7 @@ All dependencies are located within the `deps` directory.
This a list of all the dependencies:
* [acorn 8.10.0][]
-* [ada 2.5.0][]
+* [ada 2.6.0][]
* [base64 0.5.0][]
* [brotli 1.0.9][]
* [c-ares 1.19.0][]
@@ -150,7 +150,7 @@ The [acorn](https://github.com/acornjs/acorn) dependency is a JavaScript parser.
[acorn-walk](https://github.com/acornjs/acorn/tree/master/acorn-walk) is
an abstract syntax tree walker for the ESTree format.
-### ada 2.5.0
+### ada 2.6.0
The [ada](https://github.com/ada-url/ada) dependency is a
fast and spec-compliant URL parser written in C++.
@@ -319,7 +319,7 @@ it comes from the Chromium team's zlib fork which incorporated
performance improvements not currently available in standard zlib.
[acorn 8.10.0]: #acorn-8100
-[ada 2.5.0]: #ada-250
+[ada 2.6.0]: #ada-260
[base64 0.5.0]: #base64-050
[brotli 1.0.9]: #brotli-109
[c-ares 1.19.0]: #c-ares-1190
From 660da785e687ab0517397616a9e55b5782582ff5 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 26 Jul 2023 00:15:17 +0000
Subject: [PATCH 051/157] doc: run license-builder
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
PR-URL: https://github.com/nodejs/node/pull/48898
Reviewed-By: Yagiz Nizipli
Reviewed-By: Geoffrey Booth
Reviewed-By: Luigi Pinca
Reviewed-By: Moshe Atlow
Reviewed-By: Darshan Sen
Reviewed-By: Rafael Gonzaga
Reviewed-By: Ruy Adorno
Reviewed-By: Tobias Nießen
---
LICENSE | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/LICENSE b/LICENSE
index 0888e1d0f9d08b..5688952e3ba22f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1329,7 +1329,7 @@ The externally maintained libraries used by Node.js are:
- ada, located at deps/ada, is licensed as follows:
"""
- Copyright 2023 Ada authors
+ Copyright 2023 Yagiz Nizipli and Daniel Lemire
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
From bc009d0c10c3df7a15a3bf57146d977bf1d0a83b Mon Sep 17 00:00:00 2001
From: Darshan Sen
Date: Wed, 26 Jul 2023 15:40:35 +0530
Subject: [PATCH 052/157] sea: add support for V8 bytecode-only caching
Refs: https://github.com/nodejs/single-executable/issues/73
Signed-off-by: Darshan Sen
PR-URL: https://github.com/nodejs/node/pull/48191
Fixes: https://github.com/nodejs/single-executable/issues/73
Reviewed-By: Yagiz Nizipli
Reviewed-By: Stephen Belanger
Reviewed-By: Joyee Cheung
---
doc/api/single-executable-applications.md | 18 +-
lib/internal/modules/cjs/loader.js | 10 +-
lib/internal/util/embedding.js | 9 +-
src/json_parser.cc | 21 +-
src/json_parser.h | 4 +-
src/node_contextify.cc | 16 ++
src/node_contextify.h | 6 +
src/node_sea.cc | 187 +++++++++++++++++-
src/node_sea.h | 5 +
src/node_snapshotable.cc | 11 +-
src/util.cc | 18 ++
src/util.h | 13 ++
test/fixtures/errors/force_colors.snapshot | 4 +-
test/fixtures/sea.js | 6 +
...e-executable-application-use-code-cache.js | 60 ++++++
15 files changed, 347 insertions(+), 41 deletions(-)
create mode 100644 test/sequential/test-single-executable-application-use-code-cache.js
diff --git a/doc/api/single-executable-applications.md b/doc/api/single-executable-applications.md
index 607158643cbc3c..41a45e9dcb1cab 100644
--- a/doc/api/single-executable-applications.md
+++ b/doc/api/single-executable-applications.md
@@ -10,6 +10,9 @@ changes:
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/46824
description: Added support for "useSnapshot".
+ - version: REPLACEME
+ pr-url: https://github.com/nodejs/node/pull/48191
+ description: Added support for "useCodeCache".
-->
> Stability: 1 - Experimental: This feature is being designed and will change.
@@ -174,7 +177,8 @@ The configuration currently reads the following top-level fields:
"main": "/path/to/bundled/script.js",
"output": "/path/to/write/the/generated/blob.blob",
"disableExperimentalSEAWarning": true, // Default: false
- "useSnapshot": false // Default: false
+ "useSnapshot": false, // Default: false
+ "useCodeCache": true // Default: false
}
```
@@ -213,6 +217,18 @@ and the main script can use the [`v8.startupSnapshot` API][] to adapt to
these constraints. See
[documentation about startup snapshot support in Node.js][].
+### V8 code cache support
+
+When `useCodeCache` is set to `true` in the configuration, during the generation
+of the single executable preparation blob, Node.js will compile the `main`
+script to generate the V8 code cache. The generated code cache would be part of
+the preparation blob and get injected into the final executable. When the single
+executable application is launched, instead of compiling the `main` script from
+scratch, Node.js would use the code cache to speed up the compilation, then
+execute the script, which would improve the startup performance.
+
+**Note:** `import()` does not work when `useCodeCache` is `true`.
+
## Notes
### `require(id)` in the injected module is not file based
diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js
index 6e847d0a5c6033..19a7d7e671f5ab 100644
--- a/lib/internal/modules/cjs/loader.js
+++ b/lib/internal/modules/cjs/loader.js
@@ -1123,7 +1123,7 @@ Module.prototype.require = function(id) {
let resolvedArgv;
let hasPausedEntry = false;
let Script;
-function wrapSafe(filename, content, cjsModuleInstance) {
+function wrapSafe(filename, content, cjsModuleInstance, codeCache) {
if (patched) {
const wrapper = Module.wrap(content);
if (Script === undefined) {
@@ -1158,6 +1158,7 @@ function wrapSafe(filename, content, cjsModuleInstance) {
'__dirname',
], {
filename,
+ cachedData: codeCache,
importModuleDynamically(specifier, _, importAssertions) {
const cascadedLoader = getCascadedLoader();
return cascadedLoader.import(specifier, normalizeReferrerURL(filename),
@@ -1165,6 +1166,13 @@ function wrapSafe(filename, content, cjsModuleInstance) {
},
});
+ // The code cache is used for SEAs only.
+ if (codeCache &&
+ result.cachedDataRejected !== false &&
+ internalBinding('sea').isSea()) {
+ process.emitWarning('Code cache data rejected.');
+ }
+
// Cache the source map for the module if present.
if (result.sourceMapURL) {
maybeCacheSourceMap(filename, content, this, false, undefined, result.sourceMapURL);
diff --git a/lib/internal/util/embedding.js b/lib/internal/util/embedding.js
index e2e67202477bc7..be310f401ad115 100644
--- a/lib/internal/util/embedding.js
+++ b/lib/internal/util/embedding.js
@@ -1,7 +1,8 @@
'use strict';
-const { codes: { ERR_UNKNOWN_BUILTIN_MODULE } } = require('internal/errors');
const { BuiltinModule: { normalizeRequirableId } } = require('internal/bootstrap/realm');
const { Module, wrapSafe } = require('internal/modules/cjs/loader');
+const { codes: { ERR_UNKNOWN_BUILTIN_MODULE } } = require('internal/errors');
+const { getCodeCache, getCodePath, isSea } = internalBinding('sea');
// This is roughly the same as:
//
@@ -15,7 +16,11 @@ const { Module, wrapSafe } = require('internal/modules/cjs/loader');
function embedderRunCjs(contents) {
const filename = process.execPath;
- const compiledWrapper = wrapSafe(filename, contents);
+ const compiledWrapper = wrapSafe(
+ isSea() ? getCodePath() : filename,
+ contents,
+ undefined,
+ getCodeCache());
const customModule = new Module(filename, null);
customModule.filename = filename;
diff --git a/src/json_parser.cc b/src/json_parser.cc
index a9973c099087e5..1e19e174833fa5 100644
--- a/src/json_parser.cc
+++ b/src/json_parser.cc
@@ -4,7 +4,6 @@
#include "util-inl.h"
namespace node {
-using v8::ArrayBuffer;
using v8::Context;
using v8::Isolate;
using v8::Local;
@@ -12,26 +11,8 @@ using v8::Object;
using v8::String;
using v8::Value;
-static Isolate* NewIsolate(v8::ArrayBuffer::Allocator* allocator) {
- Isolate* isolate = Isolate::Allocate();
- CHECK_NOT_NULL(isolate);
- per_process::v8_platform.Platform()->RegisterIsolate(isolate,
- uv_default_loop());
- Isolate::CreateParams params;
- params.array_buffer_allocator = allocator;
- Isolate::Initialize(isolate, params);
- return isolate;
-}
-
-void JSONParser::FreeIsolate(Isolate* isolate) {
- per_process::v8_platform.Platform()->UnregisterIsolate(isolate);
- isolate->Dispose();
-}
-
JSONParser::JSONParser()
- : allocator_(ArrayBuffer::Allocator::NewDefaultAllocator()),
- isolate_(NewIsolate(allocator_.get())),
- handle_scope_(isolate_.get()),
+ : handle_scope_(isolate_.get()),
context_(isolate_.get(), Context::New(isolate_.get())),
context_scope_(context_.Get(isolate_.get())) {}
diff --git a/src/json_parser.h b/src/json_parser.h
index 555f539acf3076..3978a24222eb03 100644
--- a/src/json_parser.h
+++ b/src/json_parser.h
@@ -24,9 +24,7 @@ class JSONParser {
private:
// We might want a lighter-weight JSON parser for this use case. But for now
// using V8 is good enough.
- static void FreeIsolate(v8::Isolate* isolate);
- std::unique_ptr allocator_;
- DeleteFnPtr isolate_;
+ RAIIIsolate isolate_;
v8::HandleScope handle_scope_;
v8::Global context_;
v8::Context::Scope context_scope_;
diff --git a/src/node_contextify.cc b/src/node_contextify.cc
index f8bd2d9b7cd71d..ee68ed12795740 100644
--- a/src/node_contextify.cc
+++ b/src/node_contextify.cc
@@ -935,6 +935,22 @@ Maybe StoreCodeCacheResult(
return Just(true);
}
+// TODO(RaisinTen): Reuse in ContextifyContext::CompileFunction().
+MaybeLocal CompileFunction(Local context,
+ Local filename,
+ Local content,
+ std::vector>* parameters) {
+ ScriptOrigin script_origin(context->GetIsolate(), filename, 0, 0, true);
+ ScriptCompiler::Source script_source(content, script_origin);
+
+ return ScriptCompiler::CompileFunction(context,
+ &script_source,
+ parameters->size(),
+ parameters->data(),
+ 0,
+ nullptr);
+}
+
bool ContextifyScript::InstanceOf(Environment* env,
const Local& value) {
return !value.IsEmpty() &&
diff --git a/src/node_contextify.h b/src/node_contextify.h
index 3160160521e0fe..9a0cbe07d6e660 100644
--- a/src/node_contextify.h
+++ b/src/node_contextify.h
@@ -210,6 +210,12 @@ v8::Maybe StoreCodeCacheResult(
bool produce_cached_data,
std::unique_ptr new_cached_data);
+v8::MaybeLocal CompileFunction(
+ v8::Local context,
+ v8::Local filename,
+ v8::Local content,
+ std::vector>* parameters);
+
} // namespace contextify
} // namespace node
diff --git a/src/node_sea.cc b/src/node_sea.cc
index b9eabef8196750..c595afbc753a79 100644
--- a/src/node_sea.cc
+++ b/src/node_sea.cc
@@ -4,11 +4,14 @@
#include "debug_utils-inl.h"
#include "env-inl.h"
#include "json_parser.h"
+#include "node_contextify.h"
+#include "node_errors.h"
#include "node_external_reference.h"
#include "node_internals.h"
#include "node_snapshot_builder.h"
#include "node_union_bytes.h"
#include "node_v8_platform-inl.h"
+#include "util-inl.h"
// The POSTJECT_SENTINEL_FUSE macro is a string of random characters selected by
// the Node.js project that is present only once in the entire binary. It is
@@ -27,10 +30,19 @@
#if !defined(DISABLE_SINGLE_EXECUTABLE_APPLICATION)
using node::ExitCode;
+using v8::ArrayBuffer;
+using v8::BackingStore;
using v8::Context;
+using v8::DataView;
+using v8::Function;
using v8::FunctionCallbackInfo;
+using v8::HandleScope;
+using v8::Isolate;
using v8::Local;
+using v8::NewStringType;
using v8::Object;
+using v8::ScriptCompiler;
+using v8::String;
using v8::Value;
namespace node {
@@ -76,6 +88,12 @@ size_t SeaSerializer::Write(const SeaResource& sea) {
written_total += WriteArithmetic(flags);
DCHECK_EQ(written_total, SeaResource::kHeaderSize);
+ Debug("Write SEA code path %p, size=%zu\n",
+ sea.code_path.data(),
+ sea.code_path.size());
+ written_total +=
+ WriteStringView(sea.code_path, StringLogMode::kAddressAndContent);
+
Debug("Write SEA resource %s %p, size=%zu\n",
sea.use_snapshot() ? "snapshot" : "code",
sea.main_code_or_snapshot.data(),
@@ -84,6 +102,14 @@ size_t SeaSerializer::Write(const SeaResource& sea) {
WriteStringView(sea.main_code_or_snapshot,
sea.use_snapshot() ? StringLogMode::kAddressOnly
: StringLogMode::kAddressAndContent);
+
+ if (sea.code_cache.has_value()) {
+ Debug("Write SEA resource code cache %p, size=%zu\n",
+ sea.code_cache->data(),
+ sea.code_cache->size());
+ written_total +=
+ WriteStringView(sea.code_cache.value(), StringLogMode::kAddressOnly);
+ }
return written_total;
}
@@ -109,6 +135,11 @@ SeaResource SeaDeserializer::Read() {
Debug("Read SEA flags %x\n", static_cast(flags));
CHECK_EQ(read_total, SeaResource::kHeaderSize);
+ std::string_view code_path =
+ ReadStringView(StringLogMode::kAddressAndContent);
+ Debug(
+ "Read SEA code path %p, size=%zu\n", code_path.data(), code_path.size());
+
bool use_snapshot = static_cast(flags & SeaFlags::kUseSnapshot);
std::string_view code =
ReadStringView(use_snapshot ? StringLogMode::kAddressOnly
@@ -118,7 +149,15 @@ SeaResource SeaDeserializer::Read() {
use_snapshot ? "snapshot" : "code",
code.data(),
code.size());
- return {flags, code};
+
+ std::string_view code_cache;
+ if (static_cast(flags & SeaFlags::kUseCodeCache)) {
+ code_cache = ReadStringView(StringLogMode::kAddressOnly);
+ Debug("Read SEA resource code cache %p, size=%zu\n",
+ code_cache.data(),
+ code_cache.size());
+ }
+ return {flags, code_path, code, code_cache};
}
std::string_view FindSingleExecutableBlob() {
@@ -167,6 +206,10 @@ bool IsSingleExecutable() {
return postject_has_resource();
}
+void IsSea(const FunctionCallbackInfo& args) {
+ args.GetReturnValue().Set(IsSingleExecutable());
+}
+
void IsExperimentalSeaWarningNeeded(const FunctionCallbackInfo& args) {
bool is_building_sea =
!per_process::cli_options->experimental_sea_config.empty();
@@ -185,6 +228,54 @@ void IsExperimentalSeaWarningNeeded(const FunctionCallbackInfo& args) {
sea_resource.flags & SeaFlags::kDisableExperimentalSeaWarning));
}
+void GetCodeCache(const FunctionCallbackInfo& args) {
+ if (!IsSingleExecutable()) {
+ return;
+ }
+
+ Isolate* isolate = args.GetIsolate();
+
+ SeaResource sea_resource = FindSingleExecutableResource();
+
+ if (!static_cast(sea_resource.flags & SeaFlags::kUseCodeCache)) {
+ return;
+ }
+
+ std::shared_ptr backing_store = ArrayBuffer::NewBackingStore(
+ const_cast(
+ static_cast(sea_resource.code_cache->data())),
+ sea_resource.code_cache->length(),
+ [](void* /* data */, size_t /* length */, void* /* deleter_data */) {
+ // The code cache data blob is not freed here because it is a static
+ // blob which is not allocated by the BackingStore allocator.
+ },
+ nullptr);
+ Local array_buffer = ArrayBuffer::New(isolate, backing_store);
+ Local data_view =
+ DataView::New(array_buffer, 0, array_buffer->ByteLength());
+
+ args.GetReturnValue().Set(data_view);
+}
+
+void GetCodePath(const FunctionCallbackInfo& args) {
+ DCHECK(IsSingleExecutable());
+
+ Isolate* isolate = args.GetIsolate();
+
+ SeaResource sea_resource = FindSingleExecutableResource();
+
+ Local code_path;
+ if (!String::NewFromUtf8(isolate,
+ sea_resource.code_path.data(),
+ NewStringType::kNormal,
+ sea_resource.code_path.length())
+ .ToLocal(&code_path)) {
+ return;
+ }
+
+ args.GetReturnValue().Set(code_path);
+}
+
std::tuple FixupArgsForSEA(int argc, char** argv) {
// Repeats argv[0] at position 1 on argv as a replacement for the missing
// entry point file path.
@@ -269,6 +360,17 @@ std::optional ParseSingleExecutableConfig(
result.flags |= SeaFlags::kUseSnapshot;
}
+ std::optional use_code_cache =
+ parser.GetTopLevelBoolField("useCodeCache");
+ if (!use_code_cache.has_value()) {
+ FPrintF(
+ stderr, "\"useCodeCache\" field of %s is not a Boolean\n", config_path);
+ return std::nullopt;
+ }
+ if (use_code_cache.value()) {
+ result.flags |= SeaFlags::kUseCodeCache;
+ }
+
return result;
}
@@ -307,6 +409,59 @@ ExitCode GenerateSnapshotForSEA(const SeaConfig& config,
return ExitCode::kNoFailure;
}
+std::optional GenerateCodeCache(std::string_view main_path,
+ std::string_view main_script) {
+ RAIIIsolate raii_isolate;
+ Isolate* isolate = raii_isolate.get();
+
+ HandleScope handle_scope(isolate);
+ Local context = Context::New(isolate);
+ Context::Scope context_scope(context);
+
+ errors::PrinterTryCatch bootstrapCatch(
+ isolate, errors::PrinterTryCatch::kPrintSourceLine);
+
+ Local filename;
+ if (!String::NewFromUtf8(isolate,
+ main_path.data(),
+ NewStringType::kNormal,
+ main_path.length())
+ .ToLocal(&filename)) {
+ return std::nullopt;
+ }
+
+ Local content;
+ if (!String::NewFromUtf8(isolate,
+ main_script.data(),
+ NewStringType::kNormal,
+ main_script.length())
+ .ToLocal(&content)) {
+ return std::nullopt;
+ }
+
+ std::vector> parameters = {
+ FIXED_ONE_BYTE_STRING(isolate, "exports"),
+ FIXED_ONE_BYTE_STRING(isolate, "require"),
+ FIXED_ONE_BYTE_STRING(isolate, "module"),
+ FIXED_ONE_BYTE_STRING(isolate, "__filename"),
+ FIXED_ONE_BYTE_STRING(isolate, "__dirname"),
+ };
+
+ // TODO(RaisinTen): Using the V8 code cache prevents us from using `import()`
+ // in the SEA code. Support it.
+ // Refs: https://github.com/nodejs/node/pull/48191#discussion_r1213271430
+ Local fn;
+ if (!contextify::CompileFunction(context, filename, content, ¶meters)
+ .ToLocal(&fn)) {
+ return std::nullopt;
+ }
+
+ std::unique_ptr cache{
+ ScriptCompiler::CreateCodeCacheForFunction(fn)};
+ std::string code_cache(cache->data, cache->data + cache->length);
+ return code_cache;
+}
+
ExitCode GenerateSingleExecutableBlob(
const SeaConfig& config,
const std::vector& args,
@@ -331,11 +486,33 @@ ExitCode GenerateSingleExecutableBlob(
}
}
+ std::optional optional_code_cache =
+ GenerateCodeCache(config.main_path, main_script);
+ if (!optional_code_cache.has_value()) {
+ FPrintF(stderr, "Cannot generate V8 code cache\n");
+ return ExitCode::kGenericUserError;
+ }
+
+ std::optional optional_sv_code_cache;
+ std::string code_cache;
+ if (static_cast(config.flags & SeaFlags::kUseCodeCache)) {
+ std::optional optional_code_cache =
+ GenerateCodeCache(config.main_path, main_script);
+ if (!optional_code_cache.has_value()) {
+ FPrintF(stderr, "Cannot generate V8 code cache\n");
+ return ExitCode::kGenericUserError;
+ }
+ code_cache = optional_code_cache.value();
+ optional_sv_code_cache = code_cache;
+ }
+
SeaResource sea{
config.flags,
+ config.main_path,
builds_snapshot_from_main
? std::string_view{snapshot_blob.data(), snapshot_blob.size()}
- : std::string_view{main_script.data(), main_script.size()}};
+ : std::string_view{main_script.data(), main_script.size()},
+ optional_sv_code_cache};
SeaSerializer serializer;
serializer.Write(sea);
@@ -374,14 +551,20 @@ void Initialize(Local