diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts
index 6b4448234a0944..becc1dd5fad066 100644
--- a/packages/vite/src/node/optimizer/index.ts
+++ b/packages/vite/src/node/optimizer/index.ts
@@ -101,11 +101,18 @@ export interface DepOptimizationResult {
    * for large applications
    */
   alteredFiles: boolean
+  /**
+   * When doing a re-run, if there are newly discovered dependencies
+   * the page reload will be delayed until the next rerun, so the
+   * current result will be discarded
+   */
+  commit: () => void
+  cancel: () => void
 }
 
 export interface DepOptimizationProcessing {
-  promise: Promise<DepOptimizationResult | undefined>
-  resolve: (result?: DepOptimizationResult) => void
+  promise: Promise<void>
+  resolve: () => void
 }
 
 export interface OptimizedDepInfo {
@@ -118,7 +125,7 @@ export interface OptimizedDepInfo {
    * During optimization, ids can still be resolved to their final location
    * but the bundles may not yet be saved to disk
    */
-  processing: Promise<DepOptimizationResult | undefined>
+  processing: Promise<void>
 }
 
 export interface DepOptimizationMetadata {
@@ -141,11 +148,6 @@ export interface DepOptimizationMetadata {
    * Metadata for each newly discovered dependency after processing
    */
   discovered: Record<string, OptimizedDepInfo>
-  /**
-   * During optimization, ids can still be resolved to their final location
-   * but the bundles may not yet be saved to disk
-   */
-  processing: Promise<DepOptimizationResult | undefined>
 }
 
 /**
@@ -166,7 +168,8 @@ export async function optimizeDeps(
     newDeps,
     ssr
   )
-  await run()
+  const result = await run()
+  result.commit()
   return metadata
 }
 
@@ -183,7 +186,7 @@ export async function createOptimizeDepsRun(
   ssr?: boolean
 ): Promise<{
   metadata: DepOptimizationMetadata
-  run: () => Promise<DepOptimizationResult | undefined>
+  run: () => Promise<DepOptimizationResult>
 }> {
   config = {
     ...config,
@@ -210,8 +213,7 @@ export async function createOptimizeDepsRun(
     hash: mainHash,
     browserHash: mainHash,
     optimized: {},
-    discovered: {},
-    processing: processing.promise
+    discovered: {}
   }
 
   if (!force) {
@@ -227,9 +229,20 @@ export async function createOptimizeDepsRun(
     // hash is consistent, no need to re-bundle
     if (prevData && prevData.hash === metadata.hash) {
       log('Hash is consistent. Skipping. Use --force to override.')
+      // Nothing to commit or cancel as we are using the cache, we only
+      // need to resolve the processing promise so requests can move on
+      const resolve = () => {
+        processing.resolve()
+      }
       return {
         metadata: prevData,
-        run: () => (processing.resolve(), processing.promise)
+        run: async () => {
+          return {
+            alteredFiles: false,
+            commit: resolve,
+            cancel: resolve
+          }
+        }
       }
     }
   }
@@ -315,19 +328,24 @@ export async function createOptimizeDepsRun(
 
   return { metadata, run: prebundleDeps }
 
-  async function prebundleDeps(): Promise<DepOptimizationResult | undefined> {
+  async function prebundleDeps(): Promise<DepOptimizationResult> {
     // We prebundle dependencies with esbuild and cache them, but there is no need
     // to wait here. Code that needs to access the cached deps needs to await
-    // the optimizeDepsMetadata.processing promise
+    // the optimizedDepInfo.processing promise for each dep
    const qualifiedIds = Object.keys(deps)
 
     if (!qualifiedIds.length) {
-      // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
-      commitProcessingDepsCacheSync()
-      log(`No dependencies to bundle. Skipping.\n\n\n`)
-      processing.resolve()
-      return
+      return {
+        alteredFiles: false,
+        commit() {
+          // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
+          commitProcessingDepsCacheSync()
+          log(`No dependencies to bundle. Skipping.\n\n\n`)
+          processing.resolve()
+        },
+        cancel
+      }
     }
 
     let depsString: string
@@ -510,12 +528,17 @@ export async function createOptimizeDepsRun(
       metadata.browserHash = newBrowserHash
     }
 
-    // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
-    commitProcessingDepsCacheSync()
-    debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`)
-    processing.resolve({ alteredFiles })
-    return processing.promise
+
+    return {
+      alteredFiles,
+      commit() {
+        // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
+        commitProcessingDepsCacheSync()
+        processing.resolve()
+      },
+      cancel
+    }
   }
 
   function commitProcessingDepsCacheSync() {
@@ -523,12 +546,21 @@ export async function createOptimizeDepsRun(
     const dataPath = path.join(processingCacheDir, '_metadata.json')
     writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir))
     // Processing is done, we can now replace the depsCacheDir with processingCacheDir
-    if (fs.existsSync(depsCacheDir)) {
-      const rmSync = fs.rmSync ?? fs.rmdirSync // TODO: Remove after support for Node 12 is dropped
-      rmSync(depsCacheDir, { recursive: true })
-    }
+    removeDirSync(depsCacheDir)
     fs.renameSync(processingCacheDir, depsCacheDir)
   }
+
+  function cancel() {
+    removeDirSync(processingCacheDir)
+    processing.resolve()
+  }
+}
+
+function removeDirSync(dir: string) {
+  if (fs.existsSync(dir)) {
+    const rmSync = fs.rmSync ?? fs.rmdirSync // TODO: Remove after support for Node 12 is dropped
+    rmSync(dir, { recursive: true })
+  }
 }
 
 export async function findKnownImports(
@@ -565,10 +597,10 @@ async function addManuallyIncludedOptimizeDeps(
 }
 
 export function newDepOptimizationProcessing(): DepOptimizationProcessing {
-  let resolve: (result?: DepOptimizationResult) => void
+  let resolve: () => void
   const promise = new Promise((_resolve) => {
     resolve = _resolve
-  }) as Promise<DepOptimizationResult | undefined>
+  }) as Promise<void>
   return { promise, resolve: resolve! }
 }
@@ -638,7 +670,7 @@ export function createIsOptimizedDepUrl(config: ResolvedConfig) {
 function parseOptimizedDepsMetadata(
   jsonMetadata: string,
   depsCacheDir: string,
-  processing: Promise<DepOptimizationResult | undefined>
+  processing: Promise<void>
 ) {
   const metadata = JSON.parse(jsonMetadata, (key: string, value: string) => {
     // Paths can be absolute or relative to the deps cache dir where
@@ -651,7 +683,7 @@ function parseOptimizedDepsMetadata(
   for (const o of Object.keys(metadata.optimized)) {
     metadata.optimized[o].processing = processing
   }
-  return { ...metadata, discovered: {}, processing }
+  return { ...metadata, discovered: {} }
 }
 
 function stringifyOptimizedDepsMetadata(
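The change above turns each optimizer run into a two-phase result: esbuild output lands in a temporary `processing` cache dir, and only `commit()` swaps it into `deps` (or `cancel()` discards it). Below is a minimal sketch of how a caller might drive this contract. `runOnce` and `shouldDiscard` are hypothetical names, not part of the diff; `shouldDiscard` stands in for the "new deps were discovered while bundling" check that registerMissing.ts performs.

```ts
// Simplified shape of the result introduced in the diff above
interface DepOptimizationResult {
  alteredFiles: boolean
  commit: () => void
  cancel: () => void
}

// Hypothetical driver: run the optimizer, then either persist or
// discard its output based on a caller-supplied predicate
async function runOnce(
  run: () => Promise<DepOptimizationResult>,
  shouldDiscard: () => boolean
): Promise<boolean> {
  const result = await run()
  if (shouldDiscard()) {
    // Deletes the temporary processing cache dir; a later rerun redoes the work
    result.cancel()
    return false
  }
  // Writes _metadata.json and renames the processing dir over `deps`
  result.commit()
  return result.alteredFiles
}
```

The key design choice is that the expensive bundling work and the observable side effect (the cache-dir swap and promise resolution) are now separate steps, so a run whose results are already stale can be thrown away cheaply.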
diff --git a/packages/vite/src/node/optimizer/registerMissing.ts b/packages/vite/src/node/optimizer/registerMissing.ts
index 2b7fe0bb8c629c..2b29d1c6a9c594 100644
--- a/packages/vite/src/node/optimizer/registerMissing.ts
+++ b/packages/vite/src/node/optimizer/registerMissing.ts
@@ -8,7 +8,7 @@ import {
 } from '.'
 import type {
   DepOptimizationMetadata,
-  DepOptimizationResult,
+  DepOptimizationProcessing,
   OptimizedDepInfo
 } from '.'
 import type { ViteDevServer } from '..'
@@ -21,17 +21,31 @@ import { resolveSSRExternal } from '../ssr/ssrExternal'
 const debounceMs = 100
 
 export function createMissingImporterRegisterFn(
-  server: ViteDevServer
+  server: ViteDevServer,
+  initialProcessingPromise: Promise<void>
 ): (id: string, resolved: string, ssr?: boolean) => OptimizedDepInfo {
   const { logger } = server.config
   let metadata = server._optimizeDepsMetadata!
 
   let handle: NodeJS.Timeout | undefined
-  let needFullReload: boolean = false
+  let newDepsDiscovered = false
 
   let depOptimizationProcessing = newDepOptimizationProcessing()
+  let depOptimizationProcessingQueue: DepOptimizationProcessing[] = []
+  const resolveEnqueuedProcessingPromises = () => {
+    // Resolve all the processings (including the ones which were delayed)
+    for (const processing of depOptimizationProcessingQueue) {
+      processing.resolve()
+    }
+    depOptimizationProcessingQueue = []
+  }
 
-  let lastDepOptimizationPromise = metadata.processing
+  let enqueuedRerun: (() => void) | undefined
+  let currentlyProcessing = true
+  initialProcessingPromise.then(() => {
+    currentlyProcessing = false
+    enqueuedRerun?.()
+  })
 
   async function rerun(ssr: boolean | undefined) {
     // debounce time to wait for new missing deps finished, issue a new
@@ -47,24 +61,9 @@ export function createMissingImporterRegisterFn(
     // if the rerun fails, server._optimizeDepsMetadata remains untouched,
     // current discovered deps are cleaned, and a fullReload is issued
 
-    // optimizeDeps needs to be run in serie. Await until the previous
-    // rerun is finished here. It could happen that two reruns are queued
-    // in that case, we only need to run one of them
-    const awaitedOptimizeDepsPromise = lastDepOptimizationPromise
-
-    await lastDepOptimizationPromise
-
-    if (awaitedOptimizeDepsPromise !== lastDepOptimizationPromise) {
-      // There were two or more rerun queued and one of them already
-      // started. Only let through the first one, and discard the others
-      return
-    }
-
-    if (handle) {
-      // New deps could have been found here, skip this rerun. Once the
-      // debounce time is over, a new rerun will be issued
-      return
-    }
+    // Ensure that rerun is called sequentially
+    enqueuedRerun = undefined
+    currentlyProcessing = true
 
     logger.info(
       colors.yellow(
@@ -80,19 +79,22 @@ export function createMissingImporterRegisterFn(
 
     // All deps, previous known and newly discovered are rebundled,
     // respect insertion order to keep the metadata file stable
+    const newDeps: Record<string, OptimizedDepInfo> = {}
+
     // Clone optimized info objects, fileHash, browserHash may be changed for them
-    const clonedOptimizedDeps: Record<string, OptimizedDepInfo> = {}
-    for (const o of Object.keys(metadata.optimized)) {
-      clonedOptimizedDeps[o] = { ...metadata.optimized[o] }
+    for (const dep of Object.keys(metadata.optimized)) {
+      newDeps[dep] = { ...metadata.optimized[dep] }
+    }
+    // Don't clone discovered info objects, they are read after awaited
+    for (const dep of Object.keys(metadata.discovered)) {
+      newDeps[dep] = metadata.discovered[dep]
     }
-    const newDeps = { ...clonedOptimizedDeps, ...metadata.discovered }
-    const thisDepOptimizationProcessing = depOptimizationProcessing
 
-    // Other rerun will await until this run is finished
-    lastDepOptimizationPromise = thisDepOptimizationProcessing.promise
+    newDepsDiscovered = false
 
-    let processingResult: DepOptimizationResult | undefined
+    // Add the current depOptimizationProcessing to the queue, these
+    // promises are going to be resolved once a rerun is committed
+    depOptimizationProcessingQueue.push(depOptimizationProcessing)
 
     // Create a new promise for the next rerun, discovered missing
     // dependencies will be assigned this promise from this point
@@ -110,47 +112,58 @@ export function createMissingImporterRegisterFn(
         ssr
       )
 
-      // We await the optimizeDeps run here, we are only going to use
-      // the newData if there wasn't an error
-      newData = optimizeDeps.metadata
-      processingResult = await optimizeDeps.run()
-
-      // update ssr externals
-      if (ssr) {
-        server._ssrExternals = resolveSSRExternal(
-          server.config,
-          Object.keys(newData.optimized)
-        )
-      }
+      const processingResult = await optimizeDeps.run()
+
+      const commitProcessing = () => {
+        processingResult.commit()
 
-      // While optimizeDeps is running, new missing deps may be discovered,
-      // in which case they will keep being added to metadata.discovered
-      for (const o of Object.keys(metadata.discovered)) {
-        if (!newData.optimized[o]) {
-          newData.discovered[o] = metadata.discovered[o]
+        newData = optimizeDeps.metadata
+
+        // update ssr externals
+        if (ssr) {
+          server._ssrExternals = resolveSSRExternal(
+            server.config,
+            Object.keys(newData.optimized)
+          )
+        }
+
+        // While optimizeDeps is running, new missing deps may be discovered,
+        // in which case they will keep being added to metadata.discovered
+        for (const o of Object.keys(metadata.discovered)) {
+          if (!newData.optimized[o]) {
+            newData.discovered[o] = metadata.discovered[o]
+          }
         }
+        metadata = server._optimizeDepsMetadata = newData
+
+        resolveEnqueuedProcessingPromises()
       }
-      newData.processing = thisDepOptimizationProcessing.promise
-      metadata = server._optimizeDepsMetadata = newData
 
-      if (!needFullReload && !processingResult?.alteredFiles) {
+      if (!processingResult.alteredFiles) {
+        commitProcessing()
+
         logger.info(colors.green(`✨ new dependencies pre-bundled...`), {
           timestamp: true
         })
       } else {
-        if (Object.keys(metadata.discovered).length > 0) {
+        if (newDepsDiscovered) {
           // There are newly discovered deps, and another rerun is about to be
-          // excecuted. Avoid the current full reload, but queue it for the next one
-          needFullReload = true
+          // executed. Avoid the current full reload by discarding this rerun result.
+          // We don't resolve the processing promises, as they will be resolved
+          // once a rerun is committed
+          processingResult.cancel()
+
           logger.info(
             colors.green(
-              `✨ dependencies updated, delaying reload as new dependencies have been found...`
+              `✨ delaying reload as new dependencies have been found...`
             ),
             { timestamp: true }
           )
         } else {
+          commitProcessing()
+
          logger.info(
            colors.green(`✨ dependencies updated, reloading page...`),
            {
@@ -165,15 +178,16 @@ export function createMissingImporterRegisterFn(
         colors.red(`error while updating dependencies:\n${e.stack}`),
         { timestamp: true, error: e }
       )
+      resolveEnqueuedProcessingPromises()
 
       // Reset missing deps, let the server rediscover the dependencies
       metadata.discovered = {}
       fullReload()
-    } finally {
-      // Rerun finished, resolve the promise to let awaiting requests or
-      // other rerun queued be processed
-      thisDepOptimizationProcessing.resolve()
     }
+
+    currentlyProcessing = false
+    // @ts-ignore
+    enqueuedRerun?.()
   }
 
   function fullReload() {
@@ -186,8 +200,6 @@ export function createMissingImporterRegisterFn(
       type: 'full-reload',
       path: '*'
     })
-
-    needFullReload = false
   }
 
   return function registerMissingImport(
@@ -205,6 +217,7 @@ export function createMissingImporterRegisterFn(
       // It will be processed in the next rerun call
      return missing
     }
+    newDepsDiscovered = true
    missing = metadata.discovered[id] = {
      file: getOptimizedDepPath(id, server.config),
      src: resolved,
@@ -224,10 +237,14 @@ export function createMissingImporterRegisterFn(
 
     // Debounced rerun, let other missing dependencies be discovered before
     // the running next optimizeDeps
+    enqueuedRerun = undefined
     if (handle) clearTimeout(handle)
     handle = setTimeout(() => {
       handle = undefined
-      rerun(ssr)
+      enqueuedRerun = () => rerun(ssr)
+      if (!currentlyProcessing) {
+        enqueuedRerun()
+      }
     }, debounceMs)
 
     // Return the path for the optimized bundle, this path is known before
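The `enqueuedRerun`/`currentlyProcessing` pair above replaces the old promise-chaining through `lastDepOptimizationPromise` with explicit sequencing: while a rerun is in flight, newly requested reruns are only recorded, and the latest one starts when the current run finishes. The sketch below condenses that scheduling pattern with generic names; `requestRerun` and `task` are hypothetical stand-ins for `registerMissingImport`'s debounce block and Vite's `rerun(ssr)`.

```ts
let handle: ReturnType<typeof setTimeout> | undefined
let currentlyProcessing = false
let enqueuedRerun: (() => void) | undefined

async function rerun(task: () => Promise<void>) {
  enqueuedRerun = undefined
  currentlyProcessing = true
  try {
    await task()
  } finally {
    currentlyProcessing = false
    // If another rerun was requested while this one was running,
    // start it now so runs always execute one at a time
    enqueuedRerun?.()
  }
}

// Debounced entry point: each new discovery resets the timer, and the
// timer only starts the rerun immediately if nothing is processing
function requestRerun(task: () => Promise<void>, debounceMs = 100) {
  enqueuedRerun = undefined
  if (handle) clearTimeout(handle)
  handle = setTimeout(() => {
    handle = undefined
    enqueuedRerun = () => rerun(task)
    if (!currentlyProcessing) enqueuedRerun()
  }, debounceMs)
}
```

Because only the latest `enqueuedRerun` closure survives, back-to-back discoveries collapse into a single queued rerun instead of a chain of awaited promises.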
diff --git a/packages/vite/src/node/server/index.ts b/packages/vite/src/node/server/index.ts
index c198dd23294fdc..d56a6a705d5416 100644
--- a/packages/vite/src/node/server/index.ts
+++ b/packages/vite/src/node/server/index.ts
@@ -375,7 +375,6 @@ export async function createServer(
   let configFileDependencies: string[] = []
   const metadata = server._optimizeDepsMetadata
   if (metadata) {
-    await metadata.processing
     configFileDependencies = Object.keys(metadata.optimized)
   }
 
@@ -591,12 +590,19 @@ export async function createServer(
       // Don't await for the optimization to finish, we can start the
       // server right away here
       server._optimizeDepsMetadata = optimizeDeps.metadata
-      optimizeDeps.run()
+
+      // Run deps optimization in parallel
+      const initialProcessingPromise = optimizeDeps
+        .run()
+        .then((result) => result.commit())
 
       // While running the first optimizeDeps, _registerMissingImport is null
       // so the resolve plugin resolves straight to node_modules during the
       // deps discovery scan phase
-      server._registerMissingImport = createMissingImporterRegisterFn(server)
+      server._registerMissingImport = createMissingImporterRegisterFn(
+        server,
+        initialProcessingPromise
+      )
     }
 
     if (!middlewareMode && httpServer) {
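Taken together, the server wiring in this last file reads as: start the first optimizer run without blocking startup, commit it when it finishes, and hand the in-flight promise to the missing-import register so the first rerun waits for it. A simplified, hypothetical model of that flow, with `startServer` and `createRegisterFn` as stand-ins for Vite's real entry points:

```ts
// Simplified shape of the optimizer result from the first file
interface DepOptimizationResult {
  alteredFiles: boolean
  commit: () => void
  cancel: () => void
}

// Hypothetical condensation of the createServer wiring shown above
async function startServer(
  run: () => Promise<DepOptimizationResult>,
  createRegisterFn: (initialProcessing: Promise<void>) => void
): Promise<void> {
  // Kick off prebundling in parallel; the server starts listening right away
  const initialProcessingPromise = run().then((result) => result.commit())

  // The register function flips `currentlyProcessing` off (and flushes any
  // enqueued rerun) once this promise resolves
  createRegisterFn(initialProcessingPromise)
}
```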