feat(optimizer): holdUntilCrawlEnd option #15244
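This PR adds a holdUntilCrawlEnd option to the dep optimizer. With the existing behavior (kept as the default), the first optimize result is held back until the crawl of static imports ends; with holdUntilCrawlEnd: false, the post-scanner result is released to the browser as soon as it is ready, at the cost of a possible optimizer re-run (and full page reload) if the scanner missed dependencies. A hedged sketch of how the option might be toggled from a user config follows; exposing it under optimizeDeps is an assumption based on the destructuring of getDepOptimizationConfig(config, ssr) in the diff below:

// vite.config.ts — sketch only; the option's placement under optimizeDeps is assumed
import { defineConfig } from 'vite'

export default defineConfig({
  optimizeDeps: {
    // false: send the scanner's optimize result to the browser immediately
    // instead of waiting for the static import crawl to finish
    holdUntilCrawlEnd: false,
  },
})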
@@ -93,6 +93,10 @@ async function createDepsOptimizer(
   let metadata =
     cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp)

+  const options = getDepOptimizationConfig(config, ssr)
+
+  const { noDiscovery, holdUntilCrawlEnd } = options
+
   const depsOptimizer: DepsOptimizer = {
     metadata,
     registerMissingImport,
@@ -103,7 +107,7 @@ async function createDepsOptimizer(
       `${depInfo.file}?v=${depInfo.browserHash}`,
     delayDepsOptimizerUntil,
     close,
-    options: getDepOptimizationConfig(config, ssr),
+    options,
   }

   depsOptimizerMap.set(config, depsOptimizer)
@@ -126,6 +130,23 @@ async function createDepsOptimizer(
     }
   }

+  let discoveredDepsWhileScanning: string[] = []
+  const logDiscoveredDepsWhileScanning = () => {
+    if (discoveredDepsWhileScanning.length) {
+      config.logger.info(
+        colors.green(
+          `✨ discovered while scanning: ${depsLogString(
+            discoveredDepsWhileScanning,
+          )}`,
+        ),
+        {
+          timestamp: true,
+        },
+      )
+      discoveredDepsWhileScanning = []
+    }
+  }
+
   let depOptimizationProcessing = promiseWithResolvers<void>()
   let depOptimizationProcessingQueue: PromiseWithResolvers<void>[] = []
   const resolveEnqueuedProcessingPromises = () => {
@@ -140,6 +161,7 @@ async function createDepsOptimizer(
   let currentlyProcessing = false

   let firstRunCalled = !!cachedMetadata
+  let warnAboutMissedDependencies = false

   // If this is a cold run, we wait for static imports discovered
   // from the first request before resolving to minimize full page reloads.
@@ -180,25 +202,25 @@ async function createDepsOptimizer(

     // Initialize discovered deps with manually added optimizeDeps.include info

-    const deps: Record<string, string> = {}
-    await addManuallyIncludedOptimizeDeps(deps, config, ssr)
+    const manuallyIncludedDeps: Record<string, string> = {}
+    await addManuallyIncludedOptimizeDeps(manuallyIncludedDeps, config, ssr)

-    const discovered = toDiscoveredDependencies(
+    const manuallyIncludedDepsInfo = toDiscoveredDependencies(
       config,
-      deps,
+      manuallyIncludedDeps,
       ssr,
       sessionTimestamp,
     )

-    for (const depInfo of Object.values(discovered)) {
+    for (const depInfo of Object.values(manuallyIncludedDepsInfo)) {
       addOptimizedDepInfo(metadata, 'discovered', {
         ...depInfo,
         processing: depOptimizationProcessing.promise,
       })
       newDepsDiscovered = true
     }

-    if (config.optimizeDeps.noDiscovery) {
+    if (noDiscovery) {
       // We don't need to scan for dependencies or wait for the static crawl to end
       // Run the first optimization run immediately
       runOptimizer()
@@ -214,6 +236,13 @@ async function createDepsOptimizer(
             const deps = await discover.result
             discover = undefined

+            const manuallyIncluded = Object.keys(manuallyIncludedDepsInfo)
+            discoveredDepsWhileScanning.push(
+              ...Object.keys(metadata.discovered).filter(
+                (dep) => !deps[dep] && !manuallyIncluded.includes(dep),
+              ),
+            )
+
             // Add these dependencies to the discovered list, as these are currently
             // used by the preAliasPlugin to support aliased and optimized deps.
             // This is also used by the CJS externalization heuristics in legacy mode
@@ -224,12 +253,31 @@ async function createDepsOptimizer(
             }

             const knownDeps = prepareKnownDeps()
             startNextDiscoveredBatch()

             // For dev, we run the scanner and the first optimization
-            // run on the background, but we wait until crawling has ended
-            // to decide if we send this result to the browser or we need to
-            // do another optimize step
+            // run on the background
             optimizationResult = runOptimizeDeps(config, knownDeps, ssr)
+
+            // If the holdUntilCrawlEnd strategy is used, we wait until crawling has
+            // ended to decide if we send this result to the browser or we need to
+            // do another optimize step
+            if (!holdUntilCrawlEnd) {
+              // If not, we release the result to the browser as soon as the scanner
+              // is done. If the scanner missed any dependency, and a new dependency
+              // is discovered while crawling static imports, then there will be a
+              // full-page reload if new common chunks are generated between the old
+              // and new optimized deps.
+              optimizationResult.result.then((result) => {
+                // Check if the crawling of static imports has already finished. In that
+                // case, the result is handled by the onCrawlEnd callback
+                if (!crawlEndFinder) return
+
+                optimizationResult = undefined // signal that we'll be using the result
+
+                runOptimizer(result)
+              })
+            }
           } catch (e) {
             logger.error(e.stack || e.message)
           } finally {
@@ -394,6 +442,16 @@ async function createDepsOptimizer(
         newDepsToLogHandle = setTimeout(() => {
           newDepsToLogHandle = undefined
           logNewlyDiscoveredDeps()
+          if (warnAboutMissedDependencies) {
+            logDiscoveredDepsWhileScanning()
+            config.logger.info(
+              colors.magenta(
+                `❗ add these dependencies to optimizeDeps.include to speed up cold start`,
+              ),
+              { timestamp: true },
+            )
+            warnAboutMissedDependencies = false
+          }
         }, 2 * debounceMs)
       } else {
         debug(
@@ -426,6 +484,16 @@ async function createDepsOptimizer(
       if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
       newDepsToLogHandle = undefined
       logNewlyDiscoveredDeps()
+      if (warnAboutMissedDependencies) {
+        logDiscoveredDepsWhileScanning()
+        config.logger.info(
+          colors.magenta(
+            `❗ add these dependencies to optimizeDeps.include to avoid a full page reload during cold start`,
+          ),
+          { timestamp: true },
+        )
+        warnAboutMissedDependencies = false
+      }
Review comment on lines +487 to +496: We could ask the user to only add the dependencies that triggered the full-page reload (the ones that have custom chunks). But adding all the dependencies is better, as it will avoid a new optimize run and let the browser download these dependencies as soon as the scan+optimize step is done. So better to ask for all of them to be included.
     }

     logger.info(
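The warnings added in the two hunks above, and the review note, point users at the same remedy: listing the missed packages in optimizeDeps.include so they are pre-bundled in the first optimize run. A hedged sketch of what that looks like in a user config; the package names below are placeholders, not taken from this PR:

// vite.config.ts — placeholder package names; the point is only that deps the
// scanner misses can be pre-bundled up front instead of triggering a re-run.
import { defineConfig } from 'vite'

export default defineConfig({
  optimizeDeps: {
    include: ['missed-dep', 'another-missed-dep'],
  },
})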
@@ -562,7 +630,7 @@ async function createDepsOptimizer(

   function debouncedProcessing(timeout = debounceMs) {
     // Debounced rerun, let other missing dependencies be discovered before
-    // the running next optimizeDeps
+    // the next optimizeDeps run
     enqueuedRerun = undefined
     if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle)
     if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
@@ -593,8 +661,17 @@ async function createDepsOptimizer(
     await depsOptimizer.scanProcessing

     if (optimizationResult && !config.optimizeDeps.noDiscovery) {
-      const result = await optimizationResult.result
-      optimizationResult = undefined
+      // In the holdUntilCrawlEnd strategy, we don't release the result of the
+      // post-scanner optimize step to the browser until we reach this point
+      // If there are new dependencies, we do another optimize run, if not, we
+      // use the post-scanner optimize result
+      // If holdUntilCrawlEnd is false and we reach here, it means that the
+      // scan+optimize step finished after crawl end. We follow the same
+      // process as in the holdUntilCrawlEnd in this case.
+      const afterScanResult = optimizationResult.result
+      optimizationResult = undefined // signal that we'll be using the result
+
+      const result = await afterScanResult
       currentlyProcessing = false

       const crawlDeps = Object.keys(metadata.discovered)
@@ -649,6 +726,20 @@ async function createDepsOptimizer(
         startNextDiscoveredBatch()
         runOptimizer(result)
       }
+    } else if (!holdUntilCrawlEnd) {
+      // The post-scanner optimize result has been released to the browser
+      // If new deps have been discovered, issue a regular rerun of the
+      // optimizer. A full page reload may still be avoided if the new
+      // optimize result is compatible in this case
+      if (newDepsDiscovered) {
+        debug?.(
+          colors.green(
+            `✨ new dependencies were found while crawling static imports, re-running optimizer`,
+          ),
+        )
+        warnAboutMissedDependencies = true
+        debouncedProcessing(0)
+      }
     } else {
       const crawlDeps = Object.keys(metadata.discovered)
       currentlyProcessing = false
Review conversation:

In case we issue the warning to add dependencies to optimizeDeps.include, I think we should also ask the user to add the deps discovered while the scanner was running. The scanner may end before some of these are found during the crawling of static imports in future cold starts.
IIUC you mean we also log the dependencies found by the scanner? (Not the static imports crawler.) I'm not sure it would be ideal to request adding them to optimizeDeps.include manually. I think it would be enough to request it for the deps found by the crawler only.

In general, I think missed dependencies could mean a bug in our code (e.g. new URL('./somewhere', import.meta.url), which I never got around to fixing 😅) or an optimizeDeps.entries misconfiguration. So we can be a bit conservative in requesting additions to optimizeDeps.include and try to fix the root cause instead.
No, if something is found by the scanner, it is fine. I mean logging the dependencies that were not found by the scanner or manually included. But some of them may have been found before the scanner finished, and right now we are also using them on the first optimize run. discoveredDepsWhileScanning should maybe be named missedDepsWhileScanning.
Ah ok, thanks for the explanation. I don't have a preference for the names, but whichever you think fits better here.
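For reference, the optimizeDeps.entries misconfiguration mentioned above concerns which files the scanner uses as entry points. A hedged sketch of widening the entries so the scanner reaches code it would otherwise only see during the static import crawl; the globs below are placeholders:

// vite.config.ts — placeholder entry globs; widening optimizeDeps.entries lets
// the scanner discover dependencies up front, which is one way to avoid the
// missed-dependency warning added in this PR.
import { defineConfig } from 'vite'

export default defineConfig({
  optimizeDeps: {
    entries: ['index.html', 'src/pages/**/*.tsx'],
  },
})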