From 1fe9d9e388ba2b989ef2c66bc86406273392a0f1 Mon Sep 17 00:00:00 2001 From: patak Date: Mon, 4 Dec 2023 23:06:23 +0100 Subject: [PATCH 1/5] feat(optimizer): holdUntilCrawlEnd option --- packages/vite/src/node/config.ts | 1 + packages/vite/src/node/optimizer/index.ts | 11 ++ packages/vite/src/node/optimizer/optimizer.ts | 140 +++++++++++++++--- 3 files changed, 135 insertions(+), 17 deletions(-) diff --git a/packages/vite/src/node/config.ts b/packages/vite/src/node/config.ts index 39de230daa4d2d..ef03a5249d493b 100644 --- a/packages/vite/src/node/config.ts +++ b/packages/vite/src/node/config.ts @@ -775,6 +775,7 @@ export async function resolveConfig( createResolver, optimizeDeps: { disabled: 'build', + holdUntilCrawlEnd: false, ...optimizeDeps, esbuildOptions: { preserveSymlinks: resolveOptions.preserveSymlinks, diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts index 6beed48d27aa06..74d0d946b68b53 100644 --- a/packages/vite/src/node/optimizer/index.ts +++ b/packages/vite/src/node/optimizer/index.ts @@ -134,6 +134,17 @@ export interface DepOptimizationConfig { * @experimental */ noDiscovery?: boolean + /** + * When enabled, it will hold the first optimized deps results until all static + * imports are crawled on cold start. This avoids the need for full-page reloads + * when new dependencies are discovered and they trigger the generation of new + * common chunks. If all dependencies are found by the scanner plus the explicitly + * defined ones in `include`, it is better to disable this option to let the + * browser process more requests in parallel. 
+ * @default true + * @experimental + */ + holdUntilCrawlEnd?: boolean } export type DepOptimizationOptions = DepOptimizationConfig & { diff --git a/packages/vite/src/node/optimizer/optimizer.ts b/packages/vite/src/node/optimizer/optimizer.ts index f580f98a0af622..338e423910a0be 100644 --- a/packages/vite/src/node/optimizer/optimizer.ts +++ b/packages/vite/src/node/optimizer/optimizer.ts @@ -101,6 +101,10 @@ async function createDepsOptimizer( let metadata = cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp) + const options = getDepOptimizationConfig(config, ssr) + + const { noDiscovery, holdUntilCrawlEnd } = options + const depsOptimizer: DepsOptimizer = { metadata, registerMissingImport, @@ -114,7 +118,7 @@ async function createDepsOptimizer( resetRegisteredIds, ensureFirstRun, close, - options: getDepOptimizationConfig(config, ssr), + options, } depsOptimizerMap.set(config, depsOptimizer) @@ -137,6 +141,23 @@ async function createDepsOptimizer( } } + let discoveredDepsWhileScanning: string[] = [] + const logDiscoveredDepsWhileScanning = () => { + if (discoveredDepsWhileScanning.length) { + config.logger.info( + colors.green( + `✨ discovered while scanning: ${depsLogString( + discoveredDepsWhileScanning, + )}`, + ), + { + timestamp: true, + }, + ) + discoveredDepsWhileScanning = [] + } + } + let depOptimizationProcessing = promiseWithResolvers() let depOptimizationProcessingQueue: PromiseWithResolvers[] = [] const resolveEnqueuedProcessingPromises = () => { @@ -151,6 +172,7 @@ async function createDepsOptimizer( let currentlyProcessing = false let firstRunCalled = !!cachedMetadata + let warnAboutMissedDependencies = false // During build, we wait for every module to be scanned before resolving // optimized deps loading for rollup on each rebuild. 
It will be recreated @@ -160,7 +182,7 @@ async function createDepsOptimizer( // On warm start or after the first optimization is run, we use a simpler // debounce strategy each time a new dep is discovered. let crawlEndFinder: CrawlEndFinder | undefined - if (isBuild || !cachedMetadata) { + if ((!noDiscovery && isBuild) || !cachedMetadata) { crawlEndFinder = setupOnCrawlEnd(onCrawlEnd) } @@ -194,17 +216,17 @@ async function createDepsOptimizer( // Initialize discovered deps with manually added optimizeDeps.include info - const deps: Record = {} - await addManuallyIncludedOptimizeDeps(deps, config, ssr) + const manuallyIncludedDeps: Record = {} + await addManuallyIncludedOptimizeDeps(manuallyIncludedDeps, config, ssr) - const discovered = toDiscoveredDependencies( + const manuallyIncludedDepsInfo = toDiscoveredDependencies( config, - deps, + manuallyIncludedDeps, ssr, sessionTimestamp, ) - for (const depInfo of Object.values(discovered)) { + for (const depInfo of Object.values(manuallyIncludedDepsInfo)) { addOptimizedDepInfo(metadata, 'discovered', { ...depInfo, processing: depOptimizationProcessing.promise, @@ -212,7 +234,7 @@ async function createDepsOptimizer( newDepsDiscovered = true } - if (config.optimizeDeps.noDiscovery) { + if (noDiscovery) { // We don't need to scan for dependencies or wait for the static crawl to end // Run the first optimization run immediately runOptimizer() @@ -228,6 +250,13 @@ async function createDepsOptimizer( const deps = await discover.result discover = undefined + const manuallyIncluded = Object.keys(manuallyIncludedDepsInfo) + discoveredDepsWhileScanning.push( + ...Object.keys(metadata.discovered).filter( + (dep) => !deps[dep] && !manuallyIncluded.includes(dep), + ), + ) + // Add these dependencies to the discovered list, as these are currently // used by the preAliasPlugin to support aliased and optimized deps. 
// This is also used by the CJS externalization heuristics in legacy mode @@ -238,12 +267,31 @@ async function createDepsOptimizer( } const knownDeps = prepareKnownDeps() + startNextDiscoveredBatch() // For dev, we run the scanner and the first optimization - // run on the background, but we wait until crawling has ended - // to decide if we send this result to the browser or we need to - // do another optimize step + // run on the background optimizationResult = runOptimizeDeps(config, knownDeps) + + // If the holdUntilCrawlEnd strategy is used, we wait until crawling has + // ended to decide if we send this result to the browser or we need to + // do another optimize step + if (!holdUntilCrawlEnd) { + // If not, we release the result to the browser as soon as the scanner + // is done. If the scanner missed any dependency, and a new dependency + // is discovered while crawling static imports, then there will be a + // full-page reload if new common chunks are generated between the old + // and new optimized deps. + optimizationResult.result.then((result) => { + // Check if the crawling of static imports has already finished. 
In that + // case, the result is handled by the onCrawlEnd callback + if (!crawlEndFinder) return + + optimizationResult = undefined // signal that we'll be using the result + + runOptimizer(result) + }) + } } catch (e) { logger.error(e.stack || e.message) } finally { @@ -408,6 +456,16 @@ async function createDepsOptimizer( newDepsToLogHandle = setTimeout(() => { newDepsToLogHandle = undefined logNewlyDiscoveredDeps() + if (warnAboutMissedDependencies) { + logDiscoveredDepsWhileScanning() + config.logger.info( + colors.magenta( + `❗ add these dependencies to optimizeDeps.include to speed up cold start`, + ), + { timestamp: true }, + ) + warnAboutMissedDependencies = false + } }, 2 * debounceMs) } else { debug( @@ -440,6 +498,16 @@ async function createDepsOptimizer( if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle) newDepsToLogHandle = undefined logNewlyDiscoveredDeps() + if (warnAboutMissedDependencies) { + logDiscoveredDepsWhileScanning() + config.logger.info( + colors.magenta( + `❗ add these dependencies to optimizeDeps.include to avoid a full page reload during cold start`, + ), + { timestamp: true }, + ) + warnAboutMissedDependencies = false + } } logger.info( @@ -584,7 +652,7 @@ async function createDepsOptimizer( return } // Debounced rerun, let other missing dependencies be discovered before - // the running next optimizeDeps + // the next optimizeDeps run enqueuedRerun = undefined if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle) if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle) @@ -614,13 +682,38 @@ async function createDepsOptimizer( return } + if (isBuild) { + currentlyProcessing = false + const crawlDeps = Object.keys(metadata.discovered) + if (crawlDeps.length === 0) { + debug?.( + colors.green( + `✨ no dependencies found while processing user modules`, + ), + ) + firstRunCalled = true + } else { + runOptimizer() + } + return + } + // Await for the scan+optimize step running in the background // It normally should be 
over by the time crawling of user code ended await depsOptimizer.scanProcessing - if (!isBuild && optimizationResult && !config.optimizeDeps.noDiscovery) { - const result = await optimizationResult.result - optimizationResult = undefined + if (optimizationResult) { + // In the holdUntilCrawlEnd strategy, we don't release the result of the + // post-scanner optimize step to the browser until we reach this point + // If there are new dependencies, we do another optimize run, if not, we + // use the post-scanner optimize result + // If holdUntilCrawlEnd is false and we reach here, it means that the + // scan+optimize step finished after crawl end. We follow the same + // process as in the holdUntilCrawlEnd in this case. + const afterScanResult = optimizationResult.result + optimizationResult = undefined // signal that we'll be using the result + + const result = await afterScanResult currentlyProcessing = false const crawlDeps = Object.keys(metadata.discovered) @@ -673,10 +766,23 @@ async function createDepsOptimizer( startNextDiscoveredBatch() runOptimizer(result) } + } else if (!holdUntilCrawlEnd) { + // The post-scanner optimize result has been released to the browser + // If new deps have been discovered, issue a regular rerun of the + // optimizer. 
A full page reload may still be avoided if the new + // optimize result is compatible in this case + if (newDepsDiscovered) { + debug?.( + colors.green( + `✨ new dependencies were found while crawling static imports, re-running optimizer`, + ), + ) + warnAboutMissedDependencies = true + debouncedProcessing(0) + } } else { - const crawlDeps = Object.keys(metadata.discovered) currentlyProcessing = false - + const crawlDeps = Object.keys(metadata.discovered) if (crawlDeps.length === 0) { debug?.( colors.green( From aa50b8d5f407d74042bb7b298e8f1baacb1bdd83 Mon Sep 17 00:00:00 2001 From: patak Date: Mon, 4 Dec 2023 23:46:31 +0100 Subject: [PATCH 2/5] chore: reduce diff --- packages/vite/src/node/optimizer/optimizer.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/vite/src/node/optimizer/optimizer.ts b/packages/vite/src/node/optimizer/optimizer.ts index 338e423910a0be..50cbda9b933a11 100644 --- a/packages/vite/src/node/optimizer/optimizer.ts +++ b/packages/vite/src/node/optimizer/optimizer.ts @@ -781,8 +781,8 @@ async function createDepsOptimizer( debouncedProcessing(0) } } else { - currentlyProcessing = false const crawlDeps = Object.keys(metadata.discovered) + currentlyProcessing = false if (crawlDeps.length === 0) { debug?.( colors.green( From 7f881c3077297a32b5692bf9d131dfcb3d351a01 Mon Sep 17 00:00:00 2001 From: patak Date: Mon, 4 Dec 2023 23:47:39 +0100 Subject: [PATCH 3/5] chore: reduce diff --- packages/vite/src/node/optimizer/optimizer.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/vite/src/node/optimizer/optimizer.ts b/packages/vite/src/node/optimizer/optimizer.ts index 50cbda9b933a11..f475cce1e20fda 100644 --- a/packages/vite/src/node/optimizer/optimizer.ts +++ b/packages/vite/src/node/optimizer/optimizer.ts @@ -783,6 +783,7 @@ async function createDepsOptimizer( } else { const crawlDeps = Object.keys(metadata.discovered) currentlyProcessing = false + if (crawlDeps.length === 0) { debug?.( colors.green( From 
ea5d2ac5f373a68d034ea9e7b8fdd0beb9285dde Mon Sep 17 00:00:00 2001 From: patak Date: Sun, 10 Dec 2023 22:40:48 +0100 Subject: [PATCH 4/5] chore: update default value --- packages/vite/src/node/config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/vite/src/node/config.ts b/packages/vite/src/node/config.ts index ef03a5249d493b..d501ad25a2f219 100644 --- a/packages/vite/src/node/config.ts +++ b/packages/vite/src/node/config.ts @@ -775,7 +775,7 @@ export async function resolveConfig( createResolver, optimizeDeps: { disabled: 'build', - holdUntilCrawlEnd: false, + holdUntilCrawlEnd: true, ...optimizeDeps, esbuildOptions: { preserveSymlinks: resolveOptions.preserveSymlinks, From 6329a85ee58f83e698f7fbb2178669ca197f22bb Mon Sep 17 00:00:00 2001 From: patak Date: Thu, 18 Jan 2024 20:41:41 +0100 Subject: [PATCH 5/5] docs: add optimizeDeps.holdUntilCrawlEnd to config --- docs/config/dep-optimization-options.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/config/dep-optimization-options.md b/docs/config/dep-optimization-options.md index 68ecc247b27760..f1c2de63e00fbc 100644 --- a/docs/config/dep-optimization-options.md +++ b/docs/config/dep-optimization-options.md @@ -62,6 +62,14 @@ Certain options are omitted since changing them would not be compatible with Vit Set to `true` to force dependency pre-bundling, ignoring previously cached optimized dependencies. +## optimizeDeps.holdUntilCrawlEnd + +- **Experimental** +- **Type:** `boolean` +- **Default:** `true` + +When enabled, it will hold the first optimized deps results until all static imports are crawled on cold start. This avoids the need for full-page reloads when new dependencies are discovered and they trigger the generation of new common chunks. If all dependencies are found by the scanner plus the explicitly defined ones in `include`, it is better to disable this option to let the browser process more requests in parallel. + ## optimizeDeps.disabled - **Deprecated**