From 676f54584c981c18413194bb539165292a66bf7c Mon Sep 17 00:00:00 2001 From: patak Date: Wed, 23 Mar 2022 09:13:06 +0100 Subject: [PATCH] feat: non-blocking scanning of dependencies (#7379) --- .../playground/cli-module/__tests__/serve.js | 2 +- packages/vite/src/node/index.ts | 3 +- .../src/node/optimizer/esbuildDepPlugin.ts | 10 +- packages/vite/src/node/optimizer/index.ts | 858 +++++++++--------- .../src/node/optimizer/registerMissing.ts | 283 ++++-- packages/vite/src/node/optimizer/scan.ts | 15 +- packages/vite/src/node/plugin.ts | 9 +- .../vite/src/node/plugins/importAnalysis.ts | 30 +- .../vite/src/node/plugins/optimizedDeps.ts | 10 +- packages/vite/src/node/plugins/preAlias.ts | 6 +- packages/vite/src/node/plugins/resolve.ts | 60 +- packages/vite/src/node/server/index.ts | 70 +- .../vite/src/node/server/pluginContainer.ts | 15 +- 13 files changed, 736 insertions(+), 635 deletions(-) diff --git a/packages/playground/cli-module/__tests__/serve.js b/packages/playground/cli-module/__tests__/serve.js index 1cda05f0adc21a..2b354f566524bf 100644 --- a/packages/playground/cli-module/__tests__/serve.js +++ b/packages/playground/cli-module/__tests__/serve.js @@ -78,7 +78,7 @@ exports.serve = async function serve(root, isProd) { const timeoutError = `server process still alive after 3s` try { killProcess(serverProcess) - await resolvedOrTimeout(serverProcess, 3000, timeoutError) + await resolvedOrTimeout(serverProcess, 10000, timeoutError) } catch (e) { if (e === timeoutError || (!serverProcess.killed && !isWindows)) { collectErrorStreams('server', e) diff --git a/packages/vite/src/node/index.ts b/packages/vite/src/node/index.ts index f0f217c9be03d3..027a715c454a74 100644 --- a/packages/vite/src/node/index.ts +++ b/packages/vite/src/node/index.ts @@ -40,7 +40,8 @@ export type { DepOptimizationOptions, DepOptimizationResult, DepOptimizationProcessing, - OptimizedDepInfo + OptimizedDepInfo, + OptimizedDeps } from './optimizer' export type { Plugin } from './plugin' export type { PackageCache, PackageData } from './packages' diff --git a/packages/vite/src/node/optimizer/esbuildDepPlugin.ts b/packages/vite/src/node/optimizer/esbuildDepPlugin.ts index 3ff86c213a54a2..4303be0ec876e7 100644 --- a/packages/vite/src/node/optimizer/esbuildDepPlugin.ts +++ b/packages/vite/src/node/optimizer/esbuildDepPlugin.ts @@ -37,8 +37,7 @@ const externalTypes = [ export function esbuildDepPlugin( qualified: Record, exportsData: Record, - config: ResolvedConfig, - ssr?: boolean + config: ResolvedConfig ): Plugin { // remove optimizable extensions from `externalTypes` list const allExternalTypes = config.optimizeDeps.extensions @@ -48,12 +47,13 @@ export function esbuildDepPlugin( : externalTypes // default resolver which prefers ESM - const _resolve = config.createResolver({ asSrc: false }) + const _resolve = config.createResolver({ asSrc: false, scan: true }) // cjs resolver that prefers Node const _resolveRequire = config.createResolver({ asSrc: false, - isRequire: true + isRequire: true, + scan: true }) const resolve = ( @@ -72,7 +72,7 @@ export function esbuildDepPlugin( _importer = importer in qualified ? qualified[importer] : importer } const resolver = kind.startsWith('require') ? 
_resolveRequire : _resolve - return resolver(id, _importer, undefined, ssr) + return resolver(id, _importer, undefined) } return { diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts index 2bdd54dc3a899a..cefa24133db529 100644 --- a/packages/vite/src/node/optimizer/index.ts +++ b/packages/vite/src/node/optimizer/index.ts @@ -21,7 +21,8 @@ import { scanImports } from './scan' import { transformWithEsbuild } from '../plugins/esbuild' import { performance } from 'perf_hooks' -const debug = createDebugger('vite:deps') +export const debuggerViteDeps = createDebugger('vite:deps') +const debug = debuggerViteDeps const isDebugEnabled = _debug('vite:deps').enabled const jsExtensionRE = /\.js$/i @@ -33,6 +34,12 @@ export type ExportsData = ReturnType & { hasReExports?: true } +export interface OptimizedDeps { + metadata: DepOptimizationMetadata + scanProcessing?: Promise + registerMissingImport: (id: string, resolved: string) => OptimizedDepInfo +} + export interface DepOptimizationOptions { /** * By default, Vite will crawl your `index.html` to detect dependencies that @@ -97,17 +104,11 @@ export interface DepOptimizationOptions { } export interface DepOptimizationResult { - /** - * After a re-optimization, the internal bundled chunks may change - * and a full page reload is required if that is the case - * If the files are stable, we can avoid the reload that is expensive - * for large applications - */ - alteredFiles: boolean + metadata: DepOptimizationMetadata /** * When doing a re-run, if there are newly discovered dependendencies - * the page reload will be delayed until the next rerun so the - * result will be discarded + * the page reload will be delayed until the next rerun so we need + * to be able to discard the result */ commit: () => void cancel: () => void @@ -119,8 +120,9 @@ export interface DepOptimizationProcessing { } export interface OptimizedDepInfo { + id: string file: string - src: string + src?: string needsInterop?: boolean browserHash?: string fileHash?: string @@ -155,6 +157,10 @@ export interface DepOptimizationMetadata { * Metadata for each newly discovered dependency after processing */ discovered: Record + /** + * OptimizedDepInfo list + */ + depInfoList: OptimizedDepInfo[] } /** @@ -163,45 +169,65 @@ export interface DepOptimizationMetadata { export async function optimizeDeps( config: ResolvedConfig, force = config.server.force, - asCommand = false, - newDeps?: Record, // missing imports encountered after server has started - ssr?: boolean + asCommand = false ): Promise { - const { metadata, run } = await createOptimizeDepsRun( + const log = asCommand ? 
config.logger.info : debug + + const cachedMetadata = loadCachedDepOptimizationMetadata( config, force, - asCommand, - null, - newDeps, - ssr + asCommand ) - const result = await run() + if (cachedMetadata) { + return cachedMetadata + } + const depsInfo = await discoverProjectDependencies(config) + + const depsString = depsLogString(Object.keys(depsInfo)) + log(colors.green(`Optimizing dependencies:\n ${depsString}`)) + + const result = await runOptimizeDeps(config, depsInfo) + result.commit() - return metadata + + return result.metadata +} + +export function createOptimizedDepsMetadata( + config: ResolvedConfig, + timestamp?: string +): DepOptimizationMetadata { + const hash = getDepHash(config) + return { + hash, + browserHash: getOptimizedBrowserHash(hash, {}, timestamp), + optimized: {}, + chunks: {}, + discovered: {}, + depInfoList: [] + } +} + +export function addOptimizedDepInfo( + metadata: DepOptimizationMetadata, + type: 'optimized' | 'discovered' | 'chunks', + depInfo: OptimizedDepInfo +): OptimizedDepInfo { + metadata[type][depInfo.id] = depInfo + metadata.depInfoList.push(depInfo) + return depInfo } /** - * Internally, Vite uses this function to prepare a optimizeDeps run. When Vite starts, we can get - * the metadata and start the server without waiting for the optimizeDeps processing to be completed + * Creates the initial dep optimization metadata, loading it from the deps cache + * if it exists and pre-bundling isn't forced */ -export async function createOptimizeDepsRun( +export function loadCachedDepOptimizationMetadata( config: ResolvedConfig, force = config.server.force, - asCommand = false, - currentData: DepOptimizationMetadata | null = null, - newDeps?: Record, // missing imports encountered after server has started - ssr?: boolean -): Promise<{ - metadata: DepOptimizationMetadata - run: () => Promise -}> { - config = { - ...config, - command: 'build' - } - - const { root, logger } = config - const log = asCommand ? logger.info : debug + asCommand = false +): DepOptimizationMetadata | undefined { + const log = asCommand ? config.logger.info : debug // Before Vite 2.9, dependencies were cached in the root of the cacheDir // For compat, we remove the cache if we find the old structure @@ -210,49 +236,103 @@ export async function createOptimizeDepsRun( } const depsCacheDir = getDepsCacheDir(config) - const processingCacheDir = getProcessingDepsCacheDir(config) - - const mainHash = getDepHash(root, config) - - const processing = newDepOptimizationProcessing() - - const metadata: DepOptimizationMetadata = { - hash: mainHash, - browserHash: mainHash, - optimized: {}, - chunks: {}, - discovered: {} - } if (!force) { - let prevData: DepOptimizationMetadata | undefined + let cachedMetadata: DepOptimizationMetadata | undefined try { - const prevDataPath = path.join(depsCacheDir, '_metadata.json') - prevData = parseOptimizedDepsMetadata( - fs.readFileSync(prevDataPath, 'utf-8'), + const cachedMetadataPath = path.join(depsCacheDir, '_metadata.json') + cachedMetadata = parseOptimizedDepsMetadata( + fs.readFileSync(cachedMetadataPath, 'utf-8'), depsCacheDir ) } catch (e) {} // hash is consistent, no need to re-bundle - if (prevData && prevData.hash === metadata.hash) { + if (cachedMetadata && cachedMetadata.hash === getDepHash(config)) { log('Hash is consistent. Skipping. 
Use --force to override.')
       // Nothing to commit or cancel as we are using the cache, we only
       // need to resolve the processing promise so requests can move on
-      const resolve = () => {
-        processing.resolve()
-      }
-      return {
-        metadata: prevData,
-        run: async () => {
-          return {
-            alteredFiles: false,
-            commit: resolve,
-            cancel: resolve
-          }
-        }
-      }
+      return cachedMetadata
     }
   }
+
+  // Start with a fresh cache
+  removeDirSync(depsCacheDir)
+}
+
+/**
+ * Initial optimizeDeps at server start. Perform a fast scan using esbuild to
+ * find deps to pre-bundle and include user hard-coded dependencies
+ */
+export async function discoverProjectDependencies(
+  config: ResolvedConfig,
+  timestamp?: string
+): Promise<Record<string, OptimizedDepInfo>> {
+  const { deps, missing } = await scanImports(config)
+
+  const missingIds = Object.keys(missing)
+  if (missingIds.length) {
+    throw new Error(
+      `The following dependencies are imported but could not be resolved:\n\n  ${missingIds
+        .map(
+          (id) =>
+            `${colors.cyan(id)} ${colors.white(
+              colors.dim(`(imported by ${missing[id]})`)
+            )}`
+        )
+        .join(`\n  `)}\n\nAre they installed?`
+    )
+  }
+
+  await addManuallyIncludedOptimizeDeps(deps, config)
+
+  const browserHash = getOptimizedBrowserHash(
+    getDepHash(config),
+    deps,
+    timestamp
+  )
+  const discovered: Record<string, OptimizedDepInfo> = {}
+  for (const id in deps) {
+    const entry = deps[id]
+    discovered[id] = {
+      id,
+      file: getOptimizedDepPath(id, config),
+      src: entry,
+      browserHash: browserHash
+    }
+  }
+  return discovered
+}
+
+export function depsLogString(qualifiedIds: string[]): string {
+  if (isDebugEnabled) {
+    return colors.yellow(qualifiedIds.join(`\n  `))
+  } else {
+    const total = qualifiedIds.length
+    const maxListed = 5
+    const listed = Math.min(total, maxListed)
+    const extra = Math.max(0, total - maxListed)
+    return colors.yellow(
+      qualifiedIds.slice(0, listed).join(`, `) +
+        (extra > 0 ? `, ...and ${extra} more` : ``)
+    )
+  }
+}
+
+/**
+ * Internally, Vite uses this function to prepare an optimizeDeps run. When
+ * Vite starts, we can get the metadata and start the server without waiting
+ * for the optimizeDeps processing to be completed
+ */
+export async function runOptimizeDeps(
+  config: ResolvedConfig,
+  depsInfo: Record<string, OptimizedDepInfo>
+): Promise<DepOptimizationResult> {
+  config = {
+    ...config,
+    command: 'build'
+  }
+
+  const depsCacheDir = getDepsCacheDir(config)
+  const processingCacheDir = getProcessingDepsCacheDir(config)

   // Create a temporary directory so we don't need to delete optimized deps
   // until they have been processed. This also avoids leaving the deps cache
   // directory in a corrupted state if there is an error
   if (fs.existsSync(processingCacheDir)) {
     emptyDir(processingCacheDir)
   } else {
     fs.mkdirSync(processingCacheDir, { recursive: true })
   }

   // a hint for Node.js
   // all files in the cache directory should be recognized as ES modules
   writeFile(
     path.join(processingCacheDir, 'package.json'),
     JSON.stringify({ type: 'module' })
   )

-  let newBrowserHash: string
-
-  let deps: Record<string, string>
-  if (!newDeps) {
-    // Initial optimizeDeps at server start.
Perform a fast scan using esbuild to - // find deps to pre-bundle and include user hard-coded dependencies - - let missing: Record - ;({ deps, missing } = await scanImports(config)) - - const missingIds = Object.keys(missing) - if (missingIds.length) { - processing.resolve() - throw new Error( - `The following dependencies are imported but could not be resolved:\n\n ${missingIds - .map( - (id) => - `${colors.cyan(id)} ${colors.white( - colors.dim(`(imported by ${missing[id]})`) - )}` - ) - .join(`\n `)}\n\nAre they installed?` - ) - } - - try { - await addManuallyIncludedOptimizeDeps(deps, config) - } catch (e) { - processing.resolve() - throw e - } - - // update browser hash - newBrowserHash = metadata.browserHash = getOptimizedBrowserHash( - metadata.hash, - deps - ) + const metadata = createOptimizedDepsMetadata(config) - // We generate the mapping of dependency ids to their cache file location - // before processing the dependencies with esbuild. This allow us to continue - // processing files in the importAnalysis and resolve plugins - for (const id in deps) { - const entry = deps[id] - metadata.optimized[id] = { - file: getOptimizedDepPath(id, config), - src: entry, - browserHash: newBrowserHash, - processing: processing.promise - } - } - } else { - // Missing dependencies were found at run-time, optimizeDeps called while the - // server is running - deps = depsFromOptimizedDepInfo(newDeps) + metadata.browserHash = getOptimizedBrowserHash( + metadata.hash, + depsFromOptimizedDepInfo(depsInfo) + ) - metadata.optimized = newDeps + // We prebundle dependencies with esbuild and cache them, but there is no need + // to wait here. Code that needs to access the cached deps needs to await + // the optimizedDepInfo.processing promise for each dep - // For reruns keep current global browser hash and newDeps individual hashes until we know - // if files are stable so we can avoid a full page reload - metadata.browserHash = currentData!.browserHash - newBrowserHash = getOptimizedBrowserHash(metadata.hash, deps) - } + const qualifiedIds = Object.keys(depsInfo) - return { metadata, run: prebundleDeps } - - async function prebundleDeps(): Promise { - // We prebundle dependencies with esbuild and cache them, but there is no need - // to wait here. Code that needs to access the cached deps needs to await - // the optimizeDepInfo.processing promise for each dep - - const qualifiedIds = Object.keys(deps) - - if (!qualifiedIds.length) { - return { - alteredFiles: false, - commit() { - // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps` - commitProcessingDepsCacheSync() - log(`No dependencies to bundle. Skipping.\n\n\n`) - processing.resolve() - }, - cancel - } + if (!qualifiedIds.length) { + return { + metadata, + commit() { + // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps` + commitProcessingDepsCacheSync() + config.logger.info(`No dependencies to bundle. Skipping.\n\n\n`) + }, + cancel } + } - let depsString: string - if (isDebugEnabled) { - depsString = colors.yellow(qualifiedIds.join(`\n `)) + // esbuild generates nested directory output with lowest common ancestor base + // this is unpredictable and makes it difficult to analyze entry / output + // mapping. So what we do here is: + // 1. flatten all ids to eliminate slash + // 2. in the plugin, read the entry ourselves as virtual files to retain the + // path. 
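   //    (flattenId in ../utils does the flattening: e.g. "react/jsx-runtime"
   //    becomes "react_jsx-runtime" and a nested "my-lib > foo" becomes
   //    "my-lib__foo")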
+ const flatIdDeps: Record = {} + const idToExports: Record = {} + const flatIdToExports: Record = {} + + const { plugins = [], ...esbuildOptions } = + config.optimizeDeps?.esbuildOptions ?? {} + + await init + for (const id in depsInfo) { + const flatId = flattenId(id) + const filePath = (flatIdDeps[flatId] = depsInfo[id].src!) + let exportsData: ExportsData + if (config.optimizeDeps.extensions?.some((ext) => filePath.endsWith(ext))) { + // For custom supported extensions, build the entry file to transform it into JS, + // and then parse with es-module-lexer. Note that the `bundle` option is not `true`, + // so only the entry file is being transformed. + const result = await build({ + ...esbuildOptions, + plugins, + entryPoints: [filePath], + write: false, + format: 'esm' + }) + exportsData = parse(result.outputFiles[0].text) as ExportsData } else { - const total = qualifiedIds.length - const maxListed = 5 - const listed = Math.min(total, maxListed) - const extra = Math.max(0, total - maxListed) - depsString = colors.yellow( - qualifiedIds.slice(0, listed).join(`\n `) + - (extra > 0 ? `\n (...and ${extra} more)` : ``) - ) - } - - if (!asCommand) { - if (!newDeps) { - // This is auto run on server start - let the user know that we are - // pre-optimizing deps - logger.info(colors.green(`Pre-bundling dependencies:\n ${depsString}`)) - logger.info( - `(this will be run only when your dependencies or config have changed)` + const entryContent = fs.readFileSync(filePath, 'utf-8') + try { + exportsData = parse(entryContent) as ExportsData + } catch { + debug( + `Unable to parse dependency: ${id}. Trying again with a JSX transform.` ) - } - } else { - logger.info(colors.green(`Optimizing dependencies:\n ${depsString}`)) - } - - // esbuild generates nested directory output with lowest common ancestor base - // this is unpredictable and makes it difficult to analyze entry / output - // mapping. So what we do here is: - // 1. flatten all ids to eliminate slash - // 2. in the plugin, read the entry ourselves as virtual files to retain the - // path. - const flatIdDeps: Record = {} - const idToExports: Record = {} - const flatIdToExports: Record = {} - - const { plugins = [], ...esbuildOptions } = - config.optimizeDeps?.esbuildOptions ?? {} - - await init - for (const id in deps) { - const flatId = flattenId(id) - const filePath = (flatIdDeps[flatId] = deps[id]) - let exportsData: ExportsData - if ( - config.optimizeDeps.extensions?.some((ext) => filePath.endsWith(ext)) - ) { - // For custom supported extensions, build the entry file to transform it into JS, - // and then parse with es-module-lexer. Note that the `bundle` option is not `true`, - // so only the entry file is being transformed. - const result = await build({ - ...esbuildOptions, - plugins, - entryPoints: [filePath], - write: false, - format: 'esm' + const transformed = await transformWithEsbuild(entryContent, filePath, { + loader: 'jsx' }) - exportsData = parse(result.outputFiles[0].text) as ExportsData - } else { - const entryContent = fs.readFileSync(filePath, 'utf-8') - try { - exportsData = parse(entryContent) as ExportsData - } catch { - debug( - `Unable to parse dependency: ${id}. Trying again with a JSX transform.` - ) - const transformed = await transformWithEsbuild( - entryContent, - filePath, - { - loader: 'jsx' - } - ) - // Ensure that optimization won't fail by defaulting '.js' to the JSX parser. - // This is useful for packages such as Gatsby. 
- esbuildOptions.loader = { - '.js': 'jsx', - ...esbuildOptions.loader - } - exportsData = parse(transformed.code) as ExportsData + // Ensure that optimization won't fail by defaulting '.js' to the JSX parser. + // This is useful for packages such as Gatsby. + esbuildOptions.loader = { + '.js': 'jsx', + ...esbuildOptions.loader } - for (const { ss, se } of exportsData[0]) { - const exp = entryContent.slice(ss, se) - if (/export\s+\*\s+from/.test(exp)) { - exportsData.hasReExports = true - } + exportsData = parse(transformed.code) as ExportsData + } + for (const { ss, se } of exportsData[0]) { + const exp = entryContent.slice(ss, se) + if (/export\s+\*\s+from/.test(exp)) { + exportsData.hasReExports = true } } - - idToExports[id] = exportsData - flatIdToExports[flatId] = exportsData } - const define: Record = { - 'process.env.NODE_ENV': JSON.stringify(config.mode) - } - for (const key in config.define) { - const value = config.define[key] - define[key] = typeof value === 'string' ? value : JSON.stringify(value) - } + idToExports[id] = exportsData + flatIdToExports[flatId] = exportsData + } - const start = performance.now() - - const result = await build({ - absWorkingDir: process.cwd(), - entryPoints: Object.keys(flatIdDeps), - bundle: true, - format: 'esm', - target: config.build.target || undefined, - external: config.optimizeDeps?.exclude, - logLevel: 'error', - splitting: true, - sourcemap: true, - outdir: processingCacheDir, - ignoreAnnotations: true, - metafile: true, - define, - plugins: [ - ...plugins, - esbuildDepPlugin(flatIdDeps, flatIdToExports, config, ssr) - ], - ...esbuildOptions - }) + const define: Record = { + 'process.env.NODE_ENV': JSON.stringify(config.mode) + } + for (const key in config.define) { + const value = config.define[key] + define[key] = typeof value === 'string' ? value : JSON.stringify(value) + } - const meta = result.metafile! + const start = performance.now() + + const result = await build({ + absWorkingDir: process.cwd(), + entryPoints: Object.keys(flatIdDeps), + bundle: true, + format: 'esm', + target: config.build.target || undefined, + external: config.optimizeDeps?.exclude, + logLevel: 'error', + splitting: true, + sourcemap: true, + outdir: processingCacheDir, + ignoreAnnotations: true, + metafile: true, + define, + plugins: [ + ...plugins, + esbuildDepPlugin(flatIdDeps, flatIdToExports, config) + ], + ...esbuildOptions + }) - // the paths in `meta.outputs` are relative to `process.cwd()` - const processingCacheDirOutputPath = path.relative( - process.cwd(), - processingCacheDir - ) + const meta = result.metafile! - for (const id in deps) { - const optimizedInfo = metadata.optimized[id] - optimizedInfo.needsInterop = needsInterop( - id, - idToExports[id], - meta.outputs, - processingCacheDirOutputPath - ) - const output = - meta.outputs[ - path.relative(process.cwd(), getProcessingDepPath(id, config)) - ] - if (output) { - // We only need to hash the output.imports in to check for stability, but adding the hash - // and file path gives us a unique hash that may be useful for other things in the future - optimizedInfo.fileHash = getHash( - metadata.hash + optimizedInfo.file + JSON.stringify(output.imports) - ) - } - } + // the paths in `meta.outputs` are relative to `process.cwd()` + const processingCacheDirOutputPath = path.relative( + process.cwd(), + processingCacheDir + ) - // This only runs when missing deps are processed. Previous optimized deps are stable if - // the newly discovered deps don't have common chunks with them. 
Comparing their fileHash we - // can find out if it is safe to keep the current browser state. If one of the file hashes - // changed, a full page reload is needed - let alteredFiles = false - if (currentData) { - alteredFiles = Object.keys(currentData.optimized).some((dep) => { - const currentInfo = currentData.optimized[dep] - const info = metadata.optimized[dep] - return ( - !info?.fileHash || - !currentInfo?.fileHash || - info?.fileHash !== currentInfo?.fileHash - ) - }) - debug(`optimized deps have altered files: ${alteredFiles}`) - } + for (const id in depsInfo) { + const output = esbuildOutputFromId(meta.outputs, id, processingCacheDir) + + addOptimizedDepInfo(metadata, 'optimized', { + ...depsInfo[id], + needsInterop: needsInterop(id, idToExports[id], output), + // We only need to hash the output.imports in to check for stability, but adding the hash + // and file path gives us a unique hash that may be useful for other things in the future + fileHash: getHash( + metadata.hash + depsInfo[id].file + JSON.stringify(output.imports) + ), + browserHash: metadata.browserHash + }) + } - for (const o of Object.keys(meta.outputs)) { - if (!o.match(jsMapExtensionRE)) { - const id = path - .relative(processingCacheDirOutputPath, o) - .replace(jsExtensionRE, '') - const file = getOptimizedDepPath(id, config) - if (!findFileInfo(metadata.optimized, file)) { - metadata.chunks[id] = { - file, - src: '', - needsInterop: false, - browserHash: - (!alteredFiles && currentData?.chunks[id]?.browserHash) || - newBrowserHash - } - } + for (const o of Object.keys(meta.outputs)) { + if (!o.match(jsMapExtensionRE)) { + const id = path + .relative(processingCacheDirOutputPath, o) + .replace(jsExtensionRE, '') + const file = getOptimizedDepPath(id, config) + if ( + !findOptimizedDepInfoInRecord( + metadata.optimized, + (depInfo) => depInfo.file === file + ) + ) { + addOptimizedDepInfo(metadata, 'chunks', { + id, + file, + needsInterop: false, + browserHash: metadata.browserHash + }) } } + } - if (alteredFiles) { - metadata.browserHash = newBrowserHash - } + const dataPath = path.join(processingCacheDir, '_metadata.json') + writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir)) - debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`) + debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`) - return { - alteredFiles, - commit() { - if (alteredFiles) { - // Overwrite individual hashes with the new global browserHash, a full page reload is required - // New deps that ended up with a different hash replaced while doing analysis import are going to - // return a not found so the browser doesn't cache them. 
And will properly get loaded after the reload - for (const id in deps) { - metadata.optimized[id].browserHash = newBrowserHash - } - } - // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync - commitProcessingDepsCacheSync() - processing.resolve() - }, - cancel - } + return { + metadata, + commit() { + // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync + commitProcessingDepsCacheSync() + }, + cancel } function commitProcessingDepsCacheSync() { - // Rewire the file paths from the temporal processing dir to the final deps cache dir - const dataPath = path.join(processingCacheDir, '_metadata.json') - writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir)) // Processing is done, we can now replace the depsCacheDir with processingCacheDir + // Rewire the file paths from the temporal processing dir to the final deps cache dir removeDirSync(depsCacheDir) fs.renameSync(processingCacheDir, depsCacheDir) } function cancel() { removeDirSync(processingCacheDir) - processing.resolve() } } @@ -639,38 +591,20 @@ export function depsFromOptimizedDepInfo( depsInfo: Record ) { return Object.fromEntries( - Object.entries(depsInfo).map((d) => [d[0], d[1].src]) + Object.entries(depsInfo).map((d) => [d[0], d[1].src!]) ) } -export function getHash(text: string) { - return createHash('sha256').update(text).digest('hex').substring(0, 8) -} - -function getOptimizedBrowserHash(hash: string, deps: Record) { - return getHash(hash + JSON.stringify(deps)) -} - -function getCachedDepFilePath(id: string, depsCacheDir: string) { - return normalizePath(path.resolve(depsCacheDir, flattenId(id) + '.js')) -} - export function getOptimizedDepPath(id: string, config: ResolvedConfig) { - return getCachedDepFilePath(id, getDepsCacheDir(config)) + return normalizePath( + path.resolve(getDepsCacheDir(config), flattenId(id) + '.js') + ) } export function getDepsCacheDir(config: ResolvedConfig) { return normalizePath(path.resolve(config.cacheDir, 'deps')) } -function getProcessingDepFilePath(id: string, processingCacheDir: string) { - return normalizePath(path.resolve(processingCacheDir, flattenId(id) + '.js')) -} - -function getProcessingDepPath(id: string, config: ResolvedConfig) { - return getProcessingDepFilePath(id, getProcessingDepsCacheDir(config)) -} - function getProcessingDepsCacheDir(config: ResolvedConfig) { return normalizePath(path.resolve(config.cacheDir, 'processing')) } @@ -701,27 +635,48 @@ export function createIsOptimizedDepUrl(config: ResolvedConfig) { function parseOptimizedDepsMetadata( jsonMetadata: string, depsCacheDir: string -) { - const metadata = JSON.parse(jsonMetadata, (key: string, value: string) => { - // Paths can be absolute or relative to the deps cache dir where - // the _metadata.json is located - if (key === 'file' || key === 'src') { - return normalizePath(path.resolve(depsCacheDir, value)) +): DepOptimizationMetadata | undefined { + const { hash, browserHash, optimized, chunks } = JSON.parse( + jsonMetadata, + (key: string, value: string) => { + // Paths can be absolute or relative to the deps cache dir where + // the _metadata.json is located + if (key === 'file' || key === 'src') { + return normalizePath(path.resolve(depsCacheDir, value)) + } + return value } - return value - }) - const { browserHash } = metadata - for (const o of Object.keys(metadata.optimized)) { - const depInfo = metadata.optimized[o] - depInfo.browserHash = browserHash + ) + if ( + !chunks || + 
Object.values(optimized).some((depInfo: any) => !depInfo.fileHash) + ) { + // outdated _metadata.json version, ignore + return } - metadata.chunks ||= {} // Support missing chunks for back compat - for (const o of Object.keys(metadata.chunks)) { - const depInfo = metadata.chunks[o] - depInfo.src = '' - depInfo.browserHash = browserHash + const metadata = { + hash, + browserHash, + optimized: {}, + discovered: {}, + chunks: {}, + depInfoList: [] + } + for (const id of Object.keys(optimized)) { + addOptimizedDepInfo(metadata, 'optimized', { + ...optimized[id], + id, + browserHash + }) + } + for (const id of Object.keys(chunks)) { + addOptimizedDepInfo(metadata, 'chunks', { + ...chunks[id], + id, + browserHash, + needsInterop: false + }) } - metadata.discovered = {} return metadata } @@ -735,50 +690,53 @@ function stringifyOptimizedDepsMetadata( metadata: DepOptimizationMetadata, depsCacheDir: string ) { + const { hash, browserHash, optimized, chunks } = metadata return JSON.stringify( - metadata, - (key: string, value: any) => { - if (key === 'discovered' || key === 'processing') { - return - } + { + hash, + browserHash, + optimized: Object.fromEntries( + Object.values(optimized).map( + ({ id, src, file, fileHash, needsInterop }) => [ + id, + { + src, + file, + fileHash, + needsInterop + } + ] + ) + ), + chunks: Object.fromEntries( + Object.values(chunks).map(({ id, file }) => [id, { file }]) + ) + }, + (key: string, value: string) => { + // Paths can be absolute or relative to the deps cache dir where + // the _metadata.json is located if (key === 'file' || key === 'src') { return normalizePath(path.relative(depsCacheDir, value)) } - if (key === 'optimized') { - // Only remove browserHash for individual dep info - const cleaned: Record = {} - for (const dep of Object.keys(value)) { - const { browserHash, ...c } = value[dep] - cleaned[dep] = c - } - return cleaned - } - if (key === 'optimized') { - return Object.keys(value).reduce( - (cleaned: Record, dep: string) => { - const { browserHash, ...c } = value[dep] - cleaned[dep] = c - return cleaned - }, - {} - ) - } - if (key === 'chunks') { - return Object.keys(value).reduce( - (cleaned: Record, dep: string) => { - const { browserHash, needsInterop, src, ...c } = value[dep] - cleaned[dep] = c - return cleaned - }, - {} - ) - } return value }, 2 ) } +function esbuildOutputFromId( + outputs: Record, + id: string, + cacheDirOutputPath: string +): any { + const flatId = flattenId(id) + '.js' + return outputs[ + normalizePath( + path.relative(process.cwd(), path.join(cacheDirOutputPath, flatId)) + ) + ] +} + // https://github.com/vitejs/vite/issues/1724#issuecomment-767619642 // a list of modules that pretends to be ESM but still uses `require`. // this causes esbuild to wrap them as CJS even when its entry appears to be ESM. @@ -787,8 +745,7 @@ const KNOWN_INTEROP_IDS = new Set(['moment']) function needsInterop( id: string, exportsData: ExportsData, - outputs: Record, - cacheDirOutputPath: string + output: { exports: string[] } ): boolean { if (KNOWN_INTEROP_IDS.has(id)) { return true @@ -802,17 +759,7 @@ function needsInterop( // if a peer dependency used require() on a ESM dependency, esbuild turns the // ESM dependency's entry chunk into a single default export... detect // such cases by checking exports mismatch, and force interop. 
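   // (illustration: an entry whose source has named exports but whose bundled
   // chunk reports only ['default'] in the esbuild metafile needs interop)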
- const flatId = flattenId(id) + '.js' - let generatedExports: string[] | undefined - for (const output in outputs) { - if ( - normalizePath(output) === - normalizePath(path.join(cacheDirOutputPath, flatId)) - ) { - generatedExports = outputs[output].exports - break - } - } + const generatedExports: string[] = output.exports if ( !generatedExports || @@ -829,8 +776,8 @@ function isSingleDefaultExport(exports: readonly string[]) { const lockfileFormats = ['package-lock.json', 'yarn.lock', 'pnpm-lock.yaml'] -function getDepHash(root: string, config: ResolvedConfig): string { - let content = lookupFile(root, lockfileFormats) || '' +export function getDepHash(config: ResolvedConfig): string { + let content = lookupFile(config.root, lockfileFormats) || '' // also take config into account // only a subset of config options that can affect dep optimization content += JSON.stringify( @@ -860,27 +807,44 @@ function getDepHash(root: string, config: ResolvedConfig): string { return value } ) - return createHash('sha256').update(content).digest('hex').substring(0, 8) + return getHash(content) +} + +function getOptimizedBrowserHash( + hash: string, + deps: Record, + timestamp = '' +) { + return getHash(hash + JSON.stringify(deps) + timestamp) } -export function optimizeDepInfoFromFile( +export function getHash(text: string): string { + return createHash('sha256').update(text).digest('hex').substring(0, 8) +} + +export function optimizedDepInfoFromId( metadata: DepOptimizationMetadata, - file: string + id: string ): OptimizedDepInfo | undefined { return ( - findFileInfo(metadata.optimized, file) || - findFileInfo(metadata.discovered, file) || - findFileInfo(metadata.chunks, file) + metadata.optimized[id] || metadata.discovered[id] || metadata.chunks[id] ) } -function findFileInfo( - dependenciesInfo: Record, +export function optimizedDepInfoFromFile( + metadata: DepOptimizationMetadata, file: string +): OptimizedDepInfo | undefined { + return metadata.depInfoList.find((depInfo) => depInfo.file === file) +} + +function findOptimizedDepInfoInRecord( + dependenciesInfo: Record, + callbackFn: (depInfo: OptimizedDepInfo, id: string) => any ): OptimizedDepInfo | undefined { for (const o of Object.keys(dependenciesInfo)) { const info = dependenciesInfo[o] - if (info.file === file) { + if (callbackFn(info, o)) { return info } } @@ -890,7 +854,7 @@ export async function optimizedDepNeedsInterop( metadata: DepOptimizationMetadata, file: string ): Promise { - const depInfo = optimizeDepInfoFromFile(metadata, file) + const depInfo = optimizedDepInfoFromFile(metadata, file) if (!depInfo) return undefined diff --git a/packages/vite/src/node/optimizer/registerMissing.ts b/packages/vite/src/node/optimizer/registerMissing.ts index 256098f351e2e8..65508ac5ae5155 100644 --- a/packages/vite/src/node/optimizer/registerMissing.ts +++ b/packages/vite/src/node/optimizer/registerMissing.ts @@ -1,18 +1,26 @@ import colors from 'picocolors' +import _debug from 'debug' import { - createOptimizeDepsRun, + runOptimizeDeps, getOptimizedDepPath, getHash, depsFromOptimizedDepInfo, - newDepOptimizationProcessing + newDepOptimizationProcessing, + loadCachedDepOptimizationMetadata, + createOptimizedDepsMetadata, + addOptimizedDepInfo, + discoverProjectDependencies, + depsLogString, + debuggerViteDeps as debug } from '.' import type { - DepOptimizationMetadata, DepOptimizationProcessing, - OptimizedDepInfo + OptimizedDepInfo, + OptimizedDeps } from '.' import type { ViteDevServer } from '..' 
-import { resolveSSRExternal } from '../ssr/ssrExternal'
+
+const isDebugEnabled = _debug('vite:deps').enabled

 /**
  * The amount to wait for requests to register newly found dependencies before triggering
@@ -20,16 +28,35 @@ import { resolveSSRExternal } from '../ssr/ssrExternal'
  */
 const debounceMs = 100

-export function createMissingImporterRegisterFn(
-  server: ViteDevServer,
-  initialProcessingPromise: Promise<void>
-): (id: string, resolved: string, ssr?: boolean) => OptimizedDepInfo {
-  const { logger } = server.config
-  let metadata = server._optimizeDepsMetadata!
+export function createOptimizedDeps(server: ViteDevServer): OptimizedDeps {
+  const { config } = server
+  const { logger } = config
+
+  const sessionTimestamp = Date.now().toString()
+
+  const cachedMetadata = loadCachedDepOptimizationMetadata(config)
+
+  const optimizedDeps: OptimizedDeps = {
+    metadata:
+      cachedMetadata || createOptimizedDepsMetadata(config, sessionTimestamp),
+    registerMissingImport
+  }

   let handle: NodeJS.Timeout | undefined
   let newDepsDiscovered = false

+  let newDepsToLog: string[] = []
+  let newDepsToLogHandle: NodeJS.Timeout | undefined
+  const logNewDeps = () => {
+    config.logger.info(
+      colors.green(`✨ dependencies optimized: ${depsLogString(newDepsToLog)}`),
+      {
+        timestamp: true
+      }
+    )
+    newDepsToLog = []
+  }
+
   let depOptimizationProcessing = newDepOptimizationProcessing()
   let depOptimizationProcessingQueue: DepOptimizationProcessing[] = []
   const resolveEnqueuedProcessingPromises = () => {
@@ -41,40 +68,80 @@ export function createMissingImporterRegisterFn(
   }

   let enqueuedRerun: (() => void) | undefined
-  let currentlyProcessing = true
-  initialProcessingPromise.then(() => {
-    currentlyProcessing = false
-    enqueuedRerun?.()
-  })
+  let currentlyProcessing = false

-  async function rerun(ssr: boolean | undefined) {
-    // debounce time to wait for new missing deps finished, issue a new
-    // optimization of deps (both old and newly found) once the previous
-    // optimizeDeps processing is finished
+  // If there wasn't a cache or it is outdated, perform a fast scan with esbuild
+  // to quickly find project dependencies and do a first optimize run
+  if (!cachedMetadata) {
+    currentlyProcessing = true
+
+    const scanPhaseProcessing = newDepOptimizationProcessing()
+    optimizedDeps.scanProcessing = scanPhaseProcessing.promise
+
+    const warmUp = async () => {
+      try {
+        debug(colors.green(`scanning for dependencies...`), {
+          timestamp: true
+        })
+
+        const { metadata } = optimizedDeps
+
+        const discovered = await discoverProjectDependencies(
+          config,
+          sessionTimestamp
+        )
+
+        // Respect the scan phase discovery order to improve reproducibility
+        for (const depInfo of Object.values(discovered)) {
+          addOptimizedDepInfo(metadata, 'discovered', {
+            ...depInfo,
+            processing: depOptimizationProcessing.promise
+          })
+        }
+
+        debug(
+          colors.green(
+            `dependencies found: ${depsLogString(Object.keys(discovered))}`
+          ),
+          {
+            timestamp: true
+          }
+        )
+
+        scanPhaseProcessing.resolve()
+        optimizedDeps.scanProcessing = undefined
+
+        runOptimizer()
+      } catch (e) {
+        logger.error(e.message)
+        if (optimizedDeps.scanProcessing) {
+          scanPhaseProcessing.resolve()
+          optimizedDeps.scanProcessing = undefined
+        }
+      }
+    }
+
+    setTimeout(warmUp, 0)
+  }
+
+  async function runOptimizer() {
+    // Ensure that rerun is called sequentially
+    enqueuedRerun = undefined
+    currentlyProcessing = true
+
+    // Ensure that a rerun will not be issued for current discovered deps
+    if (handle) clearTimeout(handle)

     // a successful completion of the optimizeDeps rerun will end up
     // creating a new bundled version of all current and discovered deps
     // in the cache dir and a new metadata info object assigned
-    // to server._optimizeDepsMetadata. A fullReload is only issued if
+    // to optimizeDeps.metadata. A fullReload is only issued if
     // the previous bundled dependencies have changed.
-    // if the rerun fails, server._optimizeDepsMetadata remains untouched,
+    // if the rerun fails, optimizeDeps.metadata remains untouched,
     // current discovered deps are cleaned, and a fullReload is issued

+    let { metadata } = optimizedDeps

     // All deps, previous known and newly discovered are rebundled,
     // respect insertion order to keep the metadata file stable
@@ -85,9 +152,10 @@ export function createMissingImporterRegisterFn(
     for (const dep of Object.keys(metadata.optimized)) {
       newDeps[dep] = { ...metadata.optimized[dep] }
     }
-    // Don't clone discovered info objects, they are read after awaited
     for (const dep of Object.keys(metadata.discovered)) {
-      newDeps[dep] = metadata.discovered[dep]
+      // Clone the discovered info discarding its processing promise
+      const { processing, ...info } = metadata.discovered[dep]
+      newDeps[dep] = info
     }

     newDepsDiscovered = false
@@ -100,51 +168,86 @@ export function createMissingImporterRegisterFn(
     // dependencies will be assigned this promise from this point
     depOptimizationProcessing = newDepOptimizationProcessing()

-    let newData: DepOptimizationMetadata | null = null
-
     try {
-      const optimizeDeps = await createOptimizeDepsRun(
-        server.config,
-        true,
-        false,
-        metadata,
-        newDeps,
-        ssr
-      )
-
-      const processingResult = await optimizeDeps.run()
+      const processingResult = await runOptimizeDeps(config, newDeps)
+
+      const newData = processingResult.metadata
+
+      // After a re-optimization, if the internal bundled chunks change a full page reload
+      // is required. If the files are stable, we can avoid the reload that is expensive
+      // for large applications. Comparing their fileHash we can find out if it is safe to
+      // keep the current browser state.
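+      // (each fileHash was computed in runOptimizeDeps above as
+      // getHash(metadata.hash + file + JSON.stringify(output.imports)), so it
+      // only changes when the bundled chunk graph of that dep changes)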
+ const needsReload = + metadata.hash !== newData.hash || + Object.keys(metadata.optimized).some((dep) => { + return ( + metadata.optimized[dep].fileHash !== newData.optimized[dep].fileHash + ) + }) const commitProcessing = () => { processingResult.commit() - newData = optimizeDeps.metadata + // While optimizeDeps is running, new missing deps may be discovered, + // in which case they will keep being added to metadata.discovered + for (const id in metadata.discovered) { + if (!newData.optimized[id]) { + addOptimizedDepInfo(newData, 'discovered', metadata.discovered[id]) + } + } - // update ssr externals - if (ssr) { - server._ssrExternals = resolveSSRExternal( - server.config, - Object.keys(newData.optimized) - ) + // If we don't reload the page, we need to keep browserHash stable + if (!needsReload) { + newData.browserHash = metadata.browserHash + for (const dep in newData.chunks) { + newData.chunks[dep].browserHash = metadata.browserHash + } + for (const dep in newData.optimized) { + newData.optimized[dep].browserHash = ( + metadata.optimized[dep] || metadata.discovered[dep] + ).browserHash + } } - // While optimizeDeps is running, new missing deps may be discovered, - // in which case they will keep being added to metadata.discovered - for (const o of Object.keys(metadata.discovered)) { - if (!newData.optimized[o]) { - newData.discovered[o] = metadata.discovered[o] + // Commit hash and needsInterop changes to the discovered deps info + // object. Allow for code to await for the discovered processing promise + // and use the information in the same object + for (const o in newData.optimized) { + const discovered = metadata.discovered[o] + if (discovered) { + const optimized = newData.optimized[o] + discovered.browserHash = optimized.browserHash + discovered.fileHash = optimized.fileHash + discovered.needsInterop = optimized.needsInterop + discovered.processing = undefined } } - metadata = server._optimizeDepsMetadata = newData + newDepsToLog.push( + ...Object.keys(newData.optimized).filter( + (dep) => !metadata.optimized[dep] + ) + ) + + metadata = optimizedDeps.metadata = newData resolveEnqueuedProcessingPromises() } - if (!processingResult.alteredFiles) { + if (!needsReload) { commitProcessing() - logger.info(colors.green(`✨ new dependencies pre-bundled...`), { - timestamp: true - }) + if (isDebugEnabled) { + logNewDeps() + debug(colors.green(`✨ previous optimized dependencies unchanged`), { + timestamp: true + }) + } else { + if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle) + newDepsToLogHandle = setTimeout(() => { + newDepsToLogHandle = undefined + logNewDeps() + }, 2 * debounceMs) + } } else { if (newDepsDiscovered) { // There are newly discovered deps, and another rerun is about to be @@ -153,7 +256,7 @@ export function createMissingImporterRegisterFn( // once a rerun is committed processingResult.cancel() - logger.info( + debug( colors.green( `✨ delaying reload as new dependencies have been found...` ), @@ -164,8 +267,14 @@ export function createMissingImporterRegisterFn( } else { commitProcessing() + if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle) + newDepsToLogHandle = undefined + logNewDeps() + logger.info( - colors.green(`✨ dependencies updated, reloading page...`), + colors.green( + `✨ previous optimized dependencies have changed, reloading page` + ), { timestamp: true } @@ -202,7 +311,17 @@ export function createMissingImporterRegisterFn( }) } - const discoveredTimestamp = Date.now() + async function rerun() { + // debounce time to wait for new missing deps 
to be discovered, then issue a new
+    // optimization of deps (both old and newly found) once the previous
+    // optimizeDeps processing is finished
+    const deps = Object.keys(optimizedDeps.metadata.discovered)
+    const depsString = depsLogString(deps)
+    debug(colors.green(`new dependencies found: ${depsString}`), {
+      timestamp: true
+    })
+    runOptimizer()
+  }

   function getDiscoveredBrowserHash(
     hash: string,
     deps: Record<string, string>,
     missing: Record<string, string>
   ) {
     return getHash(
-      hash +
-        JSON.stringify(deps) +
-        JSON.stringify(missing) +
-        discoveredTimestamp
+      hash + JSON.stringify(deps) + JSON.stringify(missing) + sessionTimestamp
     )
   }

-  return function registerMissingImport(
+  function registerMissingImport(
     id: string,
     resolved: string,
     ssr?: boolean
   ): OptimizedDepInfo {
+    if (optimizedDeps.scanProcessing) {
+      config.logger.error(
+        'Vite internal error: registering missing import before initial scanning is over'
+      )
+    }
+    const { metadata } = optimizedDeps
     const optimized = metadata.optimized[id]
     if (optimized) {
       return optimized
     }
@@ -237,7 +359,8 @@ export function createMissingImporterRegisterFn(
       return missing
     }
     newDepsDiscovered = true
-    missing = metadata.discovered[id] = {
+    missing = addOptimizedDepInfo(metadata, 'discovered', {
+      id,
       file: getOptimizedDepPath(id, server.config),
       src: resolved,
       // Assign a browserHash to this missing dependency that is unique to
@@ -252,15 +375,17 @@ export function createMissingImporterRegisterFn(
       // loading of this pre-bundled dep needs to await for its processing
       // promise to be resolved
       processing: depOptimizationProcessing.promise
-    }
+    })

     // Debounced rerun, let other missing dependencies be discovered before
     // the running next optimizeDeps
     enqueuedRerun = undefined
     if (handle) clearTimeout(handle)
+    if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
+    newDepsToLogHandle = undefined
     handle = setTimeout(() => {
       handle = undefined
-      enqueuedRerun = () => rerun(ssr)
+      enqueuedRerun = rerun
       if (!currentlyProcessing) {
         enqueuedRerun()
       }
@@ -270,4 +395,6 @@ export function createMissingImporterRegisterFn(
     // esbuild is run to generate the pre-bundle
     return missing
   }
+
+  return optimizedDeps
 }
diff --git a/packages/vite/src/node/optimizer/scan.ts b/packages/vite/src/node/optimizer/scan.ts
index d1ac4eb249a8f3..549f748d9a2b58 100644
--- a/packages/vite/src/node/optimizer/scan.ts
+++ b/packages/vite/src/node/optimizer/scan.ts
@@ -123,11 +123,19 @@ export async function scanImports(config: ResolvedConfig): Promise<{
   debug(`Scan completed in ${(performance.now() - start).toFixed(2)}ms:`, deps)

   return {
-    deps,
+    // Ensure a fixed order so hashes are stable and improve logs
+    deps: orderedDependencies(deps),
     missing
   }
 }

+function orderedDependencies(deps: Record<string, string>) {
+  const depsList = Object.entries(deps)
+  // Ensure the same browserHash for the same set of dependencies
+  depsList.sort((a, b) => a[0].localeCompare(b[0]))
+  return Object.fromEntries(depsList)
+}
+
 function globEntries(pattern: string | string[], config: ResolvedConfig) {
   return glob(pattern, {
     cwd: config.root,
@@ -165,7 +173,10 @@ function esbuildScanPlugin(
       }
       const resolved = await container.resolveId(
         id,
-        importer && normalizePath(importer)
+        importer && normalizePath(importer),
+        {
+          scan: true
+        }
       )
       const res = resolved?.id
       seen.set(key, res)
diff --git a/packages/vite/src/node/plugin.ts b/packages/vite/src/node/plugin.ts
index 36674e242bd33e..354b246dd9f182 100644
--- a/packages/vite/src/node/plugin.ts
+++ b/packages/vite/src/node/plugin.ts
@@ -121,7
+121,14 @@ export interface Plugin extends RollupPlugin { this: PluginContext, source: string, importer: string | undefined, - options: { custom?: CustomPluginOptions; ssr?: boolean } + options: { + custom?: CustomPluginOptions + ssr?: boolean + /** + * @internal + */ + scan?: boolean + } ): Promise | ResolveIdResult load?( this: PluginContext, diff --git a/packages/vite/src/node/plugins/importAnalysis.ts b/packages/vite/src/node/plugins/importAnalysis.ts index a4d2acc44db953..e7336ae4947677 100644 --- a/packages/vite/src/node/plugins/importAnalysis.ts +++ b/packages/vite/src/node/plugins/importAnalysis.ts @@ -197,19 +197,20 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { } let importerFile = importer - if ( - moduleListContains(config.optimizeDeps?.exclude, url) && - server._optimizeDepsMetadata - ) { - // if the dependency encountered in the optimized file was excluded from the optimization - // the dependency needs to be resolved starting from the original source location of the optimized file - // because starting from node_modules/.vite will not find the dependency if it was not hoisted - // (that is, if it is under node_modules directory in the package source of the optimized file) - for (const optimizedModule of Object.values( - server._optimizeDepsMetadata.optimized - )) { - if (optimizedModule.file === importerModule.file) { - importerFile = optimizedModule.src + if (moduleListContains(config.optimizeDeps?.exclude, url)) { + const optimizedDeps = server._optimizedDeps + if (optimizedDeps) { + await optimizedDeps.scanProcessing + + // if the dependency encountered in the optimized file was excluded from the optimization + // the dependency needs to be resolved starting from the original source location of the optimized file + // because starting from node_modules/.vite will not find the dependency if it was not hoisted + // (that is, if it is under node_modules directory in the package source of the optimized file) + for (const optimizedModule of optimizedDeps.metadata.depInfoList) { + if (!optimizedModule.src) continue // Ignore chunks + if (optimizedModule.file === importerModule.file) { + importerFile = optimizedModule.src + } } } } @@ -439,6 +440,7 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { importRewrites.push(async () => { let rewriteDone = false if ( + server?._optimizedDeps && isOptimizedDepFile(resolvedId, config) && !resolvedId.match(optimizedDepChunkRE) ) { @@ -450,7 +452,7 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { const file = cleanUrl(resolvedId) // Remove ?v={hash} const needsInterop = await optimizedDepNeedsInterop( - server._optimizeDepsMetadata!, + server._optimizedDeps!.metadata, file ) diff --git a/packages/vite/src/node/plugins/optimizedDeps.ts b/packages/vite/src/node/plugins/optimizedDeps.ts index 8fbdca8d08905f..adab1bd9756251 100644 --- a/packages/vite/src/node/plugins/optimizedDeps.ts +++ b/packages/vite/src/node/plugins/optimizedDeps.ts @@ -3,7 +3,7 @@ import type { Plugin } from '../plugin' import colors from 'picocolors' import { DEP_VERSION_RE } from '../constants' import { cleanUrl, createDebugger } from '../utils' -import { isOptimizedDepFile, optimizeDepInfoFromFile } from '../optimizer' +import { isOptimizedDepFile, optimizedDepInfoFromFile } from '../optimizer' import type { ViteDevServer } from '..' 
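The load hook below may be asked for a file that esbuild has not yet written; it parks the request on the dep's processing promise, the deferred created by newDepOptimizationProcessing() in optimizer/index.ts. A minimal sketch of that deferred pattern (an approximation, not the exact implementation):

    function newDepOptimizationProcessingSketch() {
      let resolve!: () => void
      const promise = new Promise<void>((r) => {
        resolve = r
      })
      // each discovered dep holds `promise`; the optimizer calls `resolve`
      // when a run commits, releasing requests waiting inside load()
      return { promise, resolve }
    }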
export const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR = @@ -25,7 +25,7 @@ export function optimizedDepsPlugin(): Plugin { async load(id) { if (server && isOptimizedDepFile(id, server.config)) { - const metadata = server?._optimizeDepsMetadata + const metadata = server?._optimizedDeps?.metadata if (metadata) { const file = cleanUrl(id) const versionMatch = id.match(DEP_VERSION_RE) @@ -34,7 +34,7 @@ export function optimizedDepsPlugin(): Plugin { : undefined // Search in both the currently optimized and newly discovered deps - const info = optimizeDepInfoFromFile(metadata, file) + const info = optimizedDepInfoFromFile(metadata, file) if (info) { if (browserHash && info.browserHash !== browserHash) { throwOutdatedRequest(id) @@ -49,9 +49,9 @@ export function optimizedDepsPlugin(): Plugin { throwProcessingError(id) return } - const newMetadata = server._optimizeDepsMetadata + const newMetadata = server._optimizedDeps?.metadata if (metadata !== newMetadata) { - const currentInfo = optimizeDepInfoFromFile(newMetadata!, file) + const currentInfo = optimizedDepInfoFromFile(newMetadata!, file) if (info.browserHash !== currentInfo?.browserHash) { throwOutdatedRequest(id) } diff --git a/packages/vite/src/node/plugins/preAlias.ts b/packages/vite/src/node/plugins/preAlias.ts index 75a0d8e5e6f9dc..dadb16aa4c28a9 100644 --- a/packages/vite/src/node/plugins/preAlias.ts +++ b/packages/vite/src/node/plugins/preAlias.ts @@ -13,9 +13,9 @@ export function preAliasPlugin(): Plugin { configureServer(_server) { server = _server }, - resolveId(id, importer, options) { - if (!options?.ssr && bareImportRE.test(id)) { - return tryOptimizedResolve(id, server, importer) + async resolveId(id, importer, options) { + if (!options?.ssr && bareImportRE.test(id) && !options?.scan) { + return await tryOptimizedResolve(id, server, importer) } } } diff --git a/packages/vite/src/node/plugins/resolve.ts b/packages/vite/src/node/plugins/resolve.ts index 5dfaffafcbb7e4..8f6a48a28da463 100644 --- a/packages/vite/src/node/plugins/resolve.ts +++ b/packages/vite/src/node/plugins/resolve.ts @@ -33,7 +33,8 @@ import { import { createIsOptimizedDepUrl, isOptimizedDepFile, - optimizeDepInfoFromFile + optimizedDepInfoFromFile, + optimizedDepInfoFromId } from '../optimizer' import type { OptimizedDepInfo } from '../optimizer' import type { ViteDevServer, SSROptions } from '..' @@ -83,6 +84,8 @@ export interface InternalResolveOptions extends ResolveOptions { // should also try import from `.ts/tsx/mts/cts` source file as fallback. isFromTsImporter?: boolean tryEsmOnly?: boolean + // True when resolving during the scan phase to discover dependencies + scan?: boolean } export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { @@ -106,7 +109,7 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { isOptimizedDepUrl = createIsOptimizedDepUrl(server.config) }, - resolveId(id, importer, resolveOpts) { + async resolveId(id, importer, resolveOpts) { const ssr = resolveOpts?.ssr === true if (id.startsWith(browserExternalId)) { return id @@ -127,7 +130,8 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { isRequire, ...baseOptions, - isFromTsImporter: isTsRequest(importer ?? '') + isFromTsImporter: isTsRequest(importer ?? ''), + scan: resolveOpts?.scan ?? 
baseOptions.scan } let res: string | PartialResolvedId | undefined @@ -136,9 +140,10 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { // tryFileResolve or /fs/ resolution but these files may not yet // exists if we are in the middle of a deps re-processing if (asSrc && isOptimizedDepUrl?.(id)) { - return id.startsWith(FS_PREFIX) + const optimizedPath = id.startsWith(FS_PREFIX) ? fsPathFromId(id) : normalizePath(ensureVolumeInPath(path.resolve(root, id.slice(1)))) + return optimizedPath } // explicit fs paths that starts with /@fs/* @@ -169,12 +174,15 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { const normalizedFsPath = normalizePath(fsPath) - if (server && isOptimizedDepFile(normalizedFsPath, server!.config)) { + if ( + server?._optimizedDeps && + isOptimizedDepFile(normalizedFsPath, server!.config) + ) { // Optimized files could not yet exist in disk, resolve to the full path // Inject the current browserHash version if the path doesn't have one if (!normalizedFsPath.match(DEP_VERSION_RE)) { - const browserHash = optimizeDepInfoFromFile( - server._optimizeDepsMetadata!, + const browserHash = optimizedDepInfoFromFile( + server._optimizedDeps!.metadata!, normalizedFsPath )?.browserHash if (browserHash) { @@ -252,7 +260,8 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { asSrc && server && !ssr && - (res = tryOptimizedResolve(id, server, importer)) + !options.scan && + (res = await tryOptimizedResolve(id, server, importer)) ) { return res } @@ -606,7 +615,8 @@ export function tryNodeResolve( if ( !resolved.includes('node_modules') || // linked !server || // build - !server._registerMissingImport // initial esbuild scan phase + !server._optimizedDeps || // resolving before listening to the server + options.scan // initial esbuild scan phase ) { return { id: resolved } } @@ -627,14 +637,17 @@ export function tryNodeResolve( // otherwise we may introduce duplicated modules for externalized files // from pre-bundled deps. - const versionHash = server._optimizeDepsMetadata?.browserHash + const versionHash = server._optimizedDeps!.metadata.browserHash if (versionHash && isJsType) { resolved = injectQuery(resolved, `v=${versionHash}`) } } else { // this is a missing import, queue optimize-deps re-run and // get a resolved its optimized info - const optimizedInfo = server._registerMissingImport!(id, resolved, ssr) + const optimizedInfo = server._optimizedDeps!.registerMissingImport( + id, + resolved + ) resolved = getOptimizedUrl(optimizedInfo) } return { id: resolved! 
@@ -644,24 +657,20 @@ export function tryNodeResolve(
 const getOptimizedUrl = (optimizedData: OptimizedDepInfo) =>
   `${optimizedData.file}?v=${optimizedData.browserHash}`
 
-export function tryOptimizedResolve(
+export async function tryOptimizedResolve(
   id: string,
   server: ViteDevServer,
   importer?: string
-): string | undefined {
-  const depData = server._optimizeDepsMetadata
+): Promise<string | undefined> {
+  const optimizedDeps = server._optimizedDeps
 
-  if (!depData) return
+  if (!optimizedDeps) return
 
-  // check if id has been optimized
-  const isOptimized = depData.optimized[id]
-  if (isOptimized) {
-    return getOptimizedUrl(isOptimized)
-  }
+  await optimizedDeps.scanProcessing
 
-  const isChunk = depData.chunks[id]
-  if (isChunk) {
-    return getOptimizedUrl(isChunk)
+  const depInfo = optimizedDepInfoFromId(optimizedDeps.metadata, id)
+  if (depInfo) {
+    return getOptimizedUrl(depInfo)
   }
 
   if (!importer) return
@@ -669,7 +678,10 @@ export function tryNodeResolve(
   // further check if id is imported by nested dependency
   let resolvedSrc: string | undefined
 
-  for (const [pkgPath, optimizedData] of Object.entries(depData.optimized)) {
+  for (const optimizedData of optimizedDeps.metadata.depInfoList) {
+    if (!optimizedData.src) continue // Ignore chunks
+
+    const pkgPath = optimizedData.id
     // check for scenarios, e.g.
     //   pkgPath => "my-lib > foo"
     //   id => "foo"
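`tryOptimizedResolve` now awaits `scanProcessing` before reading any metadata, and a single `depInfoList` lookup replaces the separate `optimized` and `chunks` checks (chunks are recognizable by their missing `src`). A simplified model of the new lookup, using hypothetical shapes rather than Vite's real types:

```ts
interface DepInfoSketch {
  id: string
  file: string
  src?: string // undefined for chunks emitted by esbuild
  browserHash?: string
}

interface OptimizedDepsSketch {
  // Pending while the non-blocking initial scan is still running
  scanProcessing?: Promise<void>
  depInfoList: DepInfoSketch[]
}

async function lookupOptimized(
  deps: OptimizedDepsSketch,
  id: string
): Promise<string | undefined> {
  // Wait for the background scan; afterwards the metadata holds every
  // dependency discovered so far (optimized, discovered, and chunks).
  await deps.scanProcessing

  const info = deps.depInfoList.find((dep) => dep.id === id)
  if (info) {
    return `${info.file}?v=${info.browserHash}`
  }
  return undefined
}
```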
diff --git a/packages/vite/src/node/server/index.ts b/packages/vite/src/node/server/index.ts
index d56a6a705d5416..e8d4c3f1e5626d 100644
--- a/packages/vite/src/node/server/index.ts
+++ b/packages/vite/src/node/server/index.ts
@@ -44,8 +44,6 @@ import { transformRequest } from './transformRequest'
 import type { ESBuildTransformResult } from '../plugins/esbuild'
 import { transformWithEsbuild } from '../plugins/esbuild'
 import type { TransformOptions as EsbuildTransformOptions } from 'esbuild'
-import type { DepOptimizationMetadata, OptimizedDepInfo } from '../optimizer'
-import { createOptimizeDepsRun } from '../optimizer'
 import { ssrLoadModule } from '../ssr/ssrModuleLoader'
 import { resolveSSRExternal } from '../ssr/ssrExternal'
 import {
@@ -53,7 +51,8 @@ import {
   ssrRewriteStacktrace
 } from '../ssr/ssrStacktrace'
 import { ssrTransform } from '../ssr/ssrTransform'
-import { createMissingImporterRegisterFn } from '../optimizer/registerMissing'
+import { createOptimizedDeps } from '../optimizer/registerMissing'
+import type { OptimizedDeps } from '../optimizer'
 import { resolveHostname } from '../utils'
 import { searchForWorkspaceRoot } from './searchRoot'
 import { CLIENT_DIR } from '../constants'
@@ -257,7 +256,7 @@ export interface ViteDevServer {
   /**
    * @internal
    */
-  _optimizeDepsMetadata: DepOptimizationMetadata | null
+  _optimizedDeps: OptimizedDeps | null
   /**
    * Deps that are externalized
    * @internal
@@ -284,16 +283,6 @@ export interface ViteDevServer {
   /**
    * @internal
    */
   _forceOptimizeOnRestart: boolean
-  /**
-   * @internal
-   */
-  _registerMissingImport:
-    | ((
-        id: string,
-        resolved: string,
-        ssr: boolean | undefined
-      ) => OptimizedDepInfo)
-    | null
   /**
    * @internal
    */
@@ -372,16 +361,18 @@ export async function createServer(
     },
     transformIndexHtml: null!, // to be immediately set
     async ssrLoadModule(url, opts?: { fixStacktrace?: boolean }) {
-      let configFileDependencies: string[] = []
-      const metadata = server._optimizeDepsMetadata
-      if (metadata) {
-        configFileDependencies = Object.keys(metadata.optimized)
+      if (!server._ssrExternals) {
+        let knownImports: string[] = []
+        const optimizedDeps = server._optimizedDeps
+        if (optimizedDeps) {
+          await optimizedDeps.scanProcessing
+          knownImports = [
+            ...Object.keys(optimizedDeps.metadata.optimized),
+            ...Object.keys(optimizedDeps.metadata.discovered)
+          ]
+        }
+        server._ssrExternals = resolveSSRExternal(config, knownImports)
       }
-
-      server._ssrExternals ||= resolveSSRExternal(
-        config,
-        configFileDependencies
-      )
       return ssrLoadModule(
         url,
         server,
@@ -434,12 +425,11 @@ export async function createServer(
       return server._restartPromise
     },
 
-    _optimizeDepsMetadata: null,
+    _optimizedDeps: null,
     _ssrExternals: null,
     _globImporters: Object.create(null),
     _restartPromise: null,
     _forceOptimizeOnRestart: false,
-    _registerMissingImport: null,
     _pendingRequests: new Map()
   }
@@ -581,39 +571,15 @@ export async function createServer(
   // error handler
   middlewares.use(errorMiddleware(server, !!middlewareMode))
 
-  const runOptimize = async () => {
-    const optimizeDeps = await createOptimizeDepsRun(
-      config,
-      config.server.force
-    )
-
-    // Don't await for the optimization to finish, we can start the
-    // server right away here
-    server._optimizeDepsMetadata = optimizeDeps.metadata
-
-    // Run deps optimization in parallel
-    const initialProcessingPromise = optimizeDeps
-      .run()
-      .then((result) => result.commit())
-
-    // While running the first optimizeDeps, _registerMissingImport is null
-    // so the resolve plugin resolves straight to node_modules during the
-    // deps discovery scan phase
-    server._registerMissingImport = createMissingImporterRegisterFn(
-      server,
-      initialProcessingPromise
-    )
-  }
-
   if (!middlewareMode && httpServer) {
     let isOptimized = false
-    // overwrite listen to run optimizer before server start
+    // overwrite listen to init optimizer before server start
     const listen = httpServer.listen.bind(httpServer)
     httpServer.listen = (async (port: number, ...args: any[]) => {
       if (!isOptimized) {
         try {
           await container.buildStart({})
-          await runOptimize()
+          server._optimizedDeps = createOptimizedDeps(server)
           isOptimized = true
         } catch (e) {
           httpServer.emit('error', e)
@@ -624,7 +590,7 @@ export async function createServer(
     }) as any
   } else {
     await container.buildStart({})
-    await runOptimize()
+    server._optimizedDeps = createOptimizedDeps(server)
   }
 
   return server
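This is the core of the non-blocking behavior: `listen()` only creates the `OptimizedDeps` object, and the scan plus first optimization run start in the background while the server is already accepting requests. A rough sketch of why that is safe (hypothetical names, not the actual code in `registerMissing.ts`):

```ts
interface MetadataSketch {
  optimized: Record<string, unknown>
  discovered: Record<string, unknown>
}

interface OptimizedDepsSketch {
  metadata: MetadataSketch
  scanProcessing?: Promise<void>
}

// Hypothetical createOptimizedDeps: nothing here is awaited by the caller,
// so httpServer.listen() can proceed immediately.
function createOptimizedDepsSketch(
  scan: () => Promise<void> // discovers deps with esbuild in the background
): OptimizedDepsSketch {
  const deps: OptimizedDepsSketch = {
    metadata: { optimized: {}, discovered: {} },
    scanProcessing: undefined
  }

  deps.scanProcessing = scan().finally(() => {
    // Once the scan settles, resolvers no longer need to wait on it.
    deps.scanProcessing = undefined
  })

  return deps
}
```

Callers that need complete metadata, like `tryOptimizedResolve` or the `ssrLoadModule` hunk above, simply `await deps.scanProcessing`, which is a no-op once the field has been cleared.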
diff --git a/packages/vite/src/node/server/pluginContainer.ts b/packages/vite/src/node/server/pluginContainer.ts
index 2aab3e03097c99..590648ed58d164 100644
--- a/packages/vite/src/node/server/pluginContainer.ts
+++ b/packages/vite/src/node/server/pluginContainer.ts
@@ -90,6 +90,10 @@ export interface PluginContainer {
     options?: {
       skip?: Set<Plugin>
       ssr?: boolean
+      /**
+       * @internal
+       */
+      scan?: boolean
     }
   ): Promise<PartialResolvedId | null>
   transform(
@@ -212,6 +216,7 @@ export async function createPluginContainer(
   class Context implements PluginContext {
     meta = minimalContext.meta
     ssr = false
+    _scan = false
     _activePlugin: Plugin | null
     _activeId: string | null = null
     _activeCode: string | null = null
@@ -241,7 +246,11 @@ export async function createPluginContainer(
         skip = new Set(this._resolveSkips)
         skip.add(this._activePlugin)
       }
-      let out = await container.resolveId(id, importer, { skip, ssr: this.ssr })
+      let out = await container.resolveId(id, importer, {
+        skip,
+        ssr: this.ssr,
+        scan: this._scan
+      })
       if (typeof out === 'string') out = { id: out }
       return out as ResolvedId | null
     }
@@ -487,8 +496,10 @@ export async function createPluginContainer(
     async resolveId(rawId, importer = join(root, 'index.html'), options) {
       const skip = options?.skip
       const ssr = options?.ssr
+      const scan = !!options?.scan
       const ctx = new Context()
       ctx.ssr = !!ssr
+      ctx._scan = scan
       ctx._resolveSkips = skip
       const resolveStart = isDebug ? performance.now() : 0
@@ -505,7 +516,7 @@ export async function createPluginContainer(
           ctx as any,
           rawId,
           importer,
-          { ssr }
+          { ssr, scan }
         )
         if (!result) continue
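Finally, the `pluginContainer` hunks thread the internal `scan` flag through nested resolution: the flag arrives via `container.resolveId(..., { scan })`, is stored on the per-call `Context`, and is forwarded again when a plugin calls `this.resolve()`, so anything resolved from inside the dependency scan stays in scan mode. A simplified model of that round trip (illustrative names only):

```ts
interface ResolveIdOptions {
  ssr?: boolean
  scan?: boolean
}

type PartialResolved = { id: string } | null

class ContainerSketch {
  async resolveId(
    id: string,
    importer?: string,
    options?: ResolveIdOptions
  ): Promise<PartialResolved> {
    // The flag is captured once per resolution...
    const ctx = new ContextSketch(this, !!options?.scan)
    // ...and handed to every plugin hook as part of its options.
    return ctx.runHooks(id, importer, { ssr: !!options?.ssr, scan: ctx.scan })
  }
}

class ContextSketch {
  constructor(
    private container: ContainerSketch,
    readonly scan: boolean
  ) {}

  // Plugins that call this.resolve() inherit the flag, so recursive
  // resolution triggered during the scan cannot accidentally leave scan mode.
  resolve(id: string, importer?: string): Promise<PartialResolved> {
    return this.container.resolveId(id, importer, { scan: this.scan })
  }

  async runHooks(
    id: string,
    _importer: string | undefined,
    _options: ResolveIdOptions
  ): Promise<PartialResolved> {
    // Placeholder: the real container iterates plugin resolveId hooks here.
    return { id }
  }
}
```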