diff --git a/packages/vite/src/node/index.ts b/packages/vite/src/node/index.ts index a6d74e5dfd46fc..fe67e8efac8f8c 100644 --- a/packages/vite/src/node/index.ts +++ b/packages/vite/src/node/index.ts @@ -32,7 +32,10 @@ export type { } from './preview' export type { DepOptimizationMetadata, - DepOptimizationOptions + DepOptimizationOptions, + DepOptimizationResult, + DepOptimizationProcessing, + OptimizedDepInfo } from './optimizer' export type { Plugin } from './plugin' export type { PackageCache, PackageData } from './packages' diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts index 302ce66842a276..0cae19ec0f0769 100644 --- a/packages/vite/src/node/optimizer/index.ts +++ b/packages/vite/src/node/optimizer/index.ts @@ -91,6 +91,34 @@ export interface DepOptimizationOptions { extensions?: string[] } +export interface DepOptimizationResult { + /** + * After a re-optimization, the internal bundled chunks may change, + * and a full page reload is required if that is the case. + * If the files are stable, we can avoid the reload, which is expensive + * for large applications + */ + alteredFiles: boolean +} + +export interface DepOptimizationProcessing { + promise: Promise<DepOptimizationResult | undefined> + resolve: (result?: DepOptimizationResult) => void +} + +export interface OptimizedDepInfo { + file: string + src: string + needsInterop?: boolean + browserHash?: string + fileHash?: string + /** + * During optimization, ids can still be resolved to their final location, + * but the bundles may not yet be saved to disk + */ + processing: Promise<DepOptimizationResult | undefined> +} + export interface DepOptimizationMetadata { /** * The main hash is determined by user config and dependency lockfiles. @@ -103,254 +131,501 @@ export interface DepOptimizationMetadata { * optimized deps. */ browserHash: string - optimized: Record< - string, - { - file: string - src: string - needsInterop: boolean - } - > + /** + * Metadata for each already optimized dependency + */ + optimized: Record<string, OptimizedDepInfo> + /** + * Metadata for each newly discovered dependency after processing + */ + discovered: Record<string, OptimizedDepInfo> + /** + * During optimization, ids can still be resolved to their final location, + * but the bundles may not yet be saved to disk + */ + processing: Promise<DepOptimizationResult | undefined> }
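The `processing` field above is the load-bearing piece: it is a promise created up front and resolved externally once esbuild finishes. A minimal sketch of the deferred pattern that `DepOptimizationProcessing` describes (the names and the consumer/producer split here are illustrative, not part of the diff):

```ts
// Deferred promise: `resolve` is captured outside the executor so the
// optimizer (producer) can settle it long after consumers grabbed `promise`.
interface Deferred<T> {
  promise: Promise<T | undefined>
  resolve: (result?: T) => void
}

function newDeferred<T>(): Deferred<T> {
  let resolve: (result?: T) => void
  const promise = new Promise<T | undefined>((_resolve) => {
    resolve = _resolve
  })
  return { promise, resolve: resolve! }
}

// A load hook can await the promise at any time...
const processing = newDeferred<{ alteredFiles: boolean }>()
processing.promise.then((result) =>
  console.log('deps ready, altered:', result?.alteredFiles)
)
// ...while the optimizer resolves it once the bundles are on disk
processing.resolve({ alteredFiles: false })
```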
+/** + * Used by Vite CLI when running `vite optimize` + */ export async function optimizeDeps( config: ResolvedConfig, force = config.server.force, asCommand = false, - newDeps?: Record<string, string>, // missing imports encountered after server has started + newDeps?: Record<string, OptimizedDepInfo>, // missing imports encountered after server has started ssr?: boolean -): Promise<DepOptimizationMetadata | null> { +): Promise<DepOptimizationMetadata> { + const { metadata, run } = await createOptimizeDepsRun( + config, + force, + asCommand, + null, + newDeps, + ssr + ) + await run() + return metadata +} + +/** + * Internally, Vite uses this function to prepare an optimizeDeps run. When Vite starts, we can get + * the metadata and start the server without waiting for the optimizeDeps processing to be completed + */ +export async function createOptimizeDepsRun( config: ResolvedConfig, force = config.server.force, asCommand = false, + currentData: DepOptimizationMetadata | null = null, newDeps?: Record<string, OptimizedDepInfo>, // missing imports encountered after server has started ssr?: boolean +): Promise<{ + metadata: DepOptimizationMetadata + run: () => Promise<DepOptimizationResult | undefined> +}> { config = { ...config, command: 'build' } - const { root, logger, cacheDir } = config + const { root, logger } = config const log = asCommand ? logger.info : debug - const dataPath = path.join(cacheDir, '_metadata.json') + // Before Vite 2.9, dependencies were cached in the root of the cacheDir + // For compat, we remove the cache if we find the old structure + if (fs.existsSync(path.join(config.cacheDir, '_metadata.json'))) { + emptyDir(config.cacheDir) + } + + const depsCacheDir = getDepsCacheDir(config) + const processingCacheDir = getProcessingDepsCacheDir(config) + const mainHash = getDepHash(root, config) - const data: DepOptimizationMetadata = { + + const processing = newDepOptimizationProcessing() + + const metadata: DepOptimizationMetadata = { hash: mainHash, browserHash: mainHash, - optimized: {} + optimized: {}, + discovered: {}, + processing: processing.promise } if (!force) { let prevData: DepOptimizationMetadata | undefined try { - prevData = JSON.parse(fs.readFileSync(dataPath, 'utf-8')) + const prevDataPath = path.join(depsCacheDir, '_metadata.json') + prevData = parseOptimizedDepsMetadata( + fs.readFileSync(prevDataPath, 'utf-8'), + depsCacheDir, + processing.promise + ) } catch (e) {} // hash is consistent, no need to re-bundle - if (prevData && prevData.hash === data.hash) { + if (prevData && prevData.hash === metadata.hash) { log('Hash is consistent. Skipping. Use --force to override.') - return prevData + return { + metadata: prevData, + run: () => (processing.resolve(), processing.promise) + } } }
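The early return above is what makes warm starts cheap: if `getDepHash` produces the same value as the persisted metadata, the cache is reused and `run()` resolves immediately. A hedged sketch of that kind of fingerprint — the real logic lives in Vite's `getDepHash`; the exact file list and config fields folded in here are assumptions:

```ts
import { createHash } from 'crypto'
import fs from 'fs'
import path from 'path'

// Illustrative only: hash the lockfiles plus the config that affects
// pre-bundling, so editing optimizeDeps options busts the cache while
// unrelated config changes keep it warm.
function computeDepHash(root: string, optimizeDepsConfig: unknown): string {
  let content = ''
  for (const lockfile of ['package-lock.json', 'yarn.lock', 'pnpm-lock.yaml']) {
    const fullPath = path.join(root, lockfile)
    if (fs.existsSync(fullPath)) {
      content += fs.readFileSync(fullPath, 'utf-8')
    }
  }
  content += JSON.stringify(optimizeDepsConfig)
  return createHash('sha256').update(content).digest('hex').substring(0, 8)
}
```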
- if (fs.existsSync(cacheDir)) { - emptyDir(cacheDir) + // Create a temporary directory so we don't need to delete optimized deps + // until they have been processed. This also avoids leaving the deps cache + // directory in a corrupted state if there is an error + if (fs.existsSync(processingCacheDir)) { + emptyDir(processingCacheDir) } else { - fs.mkdirSync(cacheDir, { recursive: true }) + fs.mkdirSync(processingCacheDir, { recursive: true }) } + // a hint for Node.js // all files in the cache directory should be recognized as ES modules writeFile( - path.resolve(cacheDir, 'package.json'), + path.resolve(processingCacheDir, 'package.json'), JSON.stringify({ type: 'module' }) ) - let deps: Record<string, string>, missing: Record<string, string> + let deps: Record<string, string> if (!newDeps) { - ;({ deps, missing } = await scanImports(config)) - } else { - deps = newDeps - missing = {} - } + // Initial optimizeDeps at server start. Perform a fast scan using esbuild to + // find deps to pre-bundle and include user hard-coded dependencies - // update browser hash - data.browserHash = createHash('sha256') - .update(data.hash + JSON.stringify(deps)) - .digest('hex') - .substring(0, 8) - - const missingIds = Object.keys(missing) - if (missingIds.length) { - throw new Error( - `The following dependencies are imported but could not be resolved:\n\n ${missingIds - .map( - (id) => - `${colors.cyan(id)} ${colors.white( - colors.dim(`(imported by ${missing[id]})`) - )}` - ) - .join(`\n `)}\n\nAre they installed?` - ) - } + let missing: Record<string, string> + ;({ deps, missing } = await scanImports(config)) - const include = config.optimizeDeps?.include - if (include) { - const resolve = config.createResolver({ asSrc: false }) - for (const id of include) { - // normalize 'foo >bar` as 'foo > bar' to prevent same id being added - // and for pretty printing - const normalizedId = normalizeId(id) - if (!deps[normalizedId]) { - const entry = await resolve(id) - if (entry) { - deps[normalizedId] = entry - } else { - throw new Error( - `Failed to resolve force included dependency: ${colors.cyan(id)}` + const missingIds = Object.keys(missing) + if (missingIds.length) { + processing.resolve() + throw new Error( + `The following dependencies are imported but could not be resolved:\n\n ${missingIds + .map( + (id) => + `${colors.cyan(id)} ${colors.white( + colors.dim(`(imported by ${missing[id]})`) + )}` ) + .join(`\n `)}\n\nAre they installed?` + ) + } + + const include = config.optimizeDeps?.include + if (include) { + const resolve = config.createResolver({ asSrc: false }) + for (const id of include) { + // normalize 'foo >bar' as 'foo > bar' to prevent the same id being added + // and for pretty printing + const normalizedId = normalizeId(id) + if (!deps[normalizedId]) { + const entry = await resolve(id) + if (entry) { + deps[normalizedId] = entry + } else { + processing.resolve() + throw new Error( + `Failed to resolve force included dependency: ${colors.cyan(id)}` + ) + } } } } - } - const qualifiedIds = Object.keys(deps) + // update browser hash + metadata.browserHash = getOptimizedBrowserHash(metadata.hash, deps) + + // We generate the mapping of dependency ids to their cache file location + // before processing the dependencies with esbuild. This allows us to continue + // processing files in the importAnalysis and resolve plugins + for (const id in deps) { + const entry = deps[id] + metadata.optimized[id] = { + file: getOptimizedDepPath(id, config), + src: entry, + browserHash: metadata.browserHash, + processing: processing.promise + } + } + } else { + // Missing dependencies were found at run-time; optimizeDeps was called while the + // server is running + deps = depsFromOptimizedDepInfo(newDeps) + + // Clone the optimized info objects, as their fileHash and browserHash may be changed + for (const o of Object.keys(newDeps)) { + metadata.optimized[o] = { ...newDeps[o] } + } - if (!qualifiedIds.length) { - writeFile(dataPath, JSON.stringify(data, null, 2)) - log(`No dependencies to bundle. Skipping.\n\n\n`) - return data + // update global browser hash, but keep the newDeps individual hashes until we know + // if files are stable so we can avoid a full page reload + metadata.browserHash = getOptimizedBrowserHash(metadata.hash, deps) } - const total = qualifiedIds.length - const maxListed = 5 - const listed = Math.min(total, maxListed) - const extra = Math.max(0, total - maxListed) - const depsString = colors.yellow( - qualifiedIds.slice(0, listed).join(`\n `) + - (extra > 0 ? `\n (...and ${extra} more)` : ``) - ) - if (!asCommand) { - if (!newDeps) { - // This is auto run on server start - let the user know that we are - // pre-optimizing deps - logger.info(colors.green(`Pre-bundling dependencies:\n ${depsString}`)) - logger.info( - `(this will be run only when your dependencies or config have changed)` - ) + return { metadata, run: prebundleDeps }
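Returning `{ metadata, run }` works because every dependency's final file path was just assigned above, before esbuild produced anything. A small sketch of that id-to-path mapping; `flattenId` here is a simplified stand-in for Vite's helper of the same name:

```ts
import path from 'path'

// Simplified stand-in for Vite's flattenId: nested ids are flattened into
// a single file name so esbuild's output layout stays predictable.
const flattenId = (id: string): string => id.replace(/[/:]/g, '_')

const getOptimizedDepPath = (id: string, cacheDir: string): string =>
  path.resolve(cacheDir, 'deps', flattenId(id) + '.js')

// 'react-dom/client' maps to '<cacheDir>/deps/react-dom_client.js' even
// while its pre-bundle is still being processed in the background.
console.log(getOptimizedDepPath('react-dom/client', '/app/node_modules/.vite'))
```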
+ + async function prebundleDeps(): Promise<DepOptimizationResult | undefined> { + // We prebundle dependencies with esbuild and cache them, but there is no need + // to wait here. Code that needs to access the cached deps needs to await + // the optimizeDepsMetadata.processing promise + + const qualifiedIds = Object.keys(deps) + + if (!qualifiedIds.length) { + // Write metadata file, delete `deps` folder and rename the `processing` folder to `deps` + commitProcessingDepsCacheSync() + log(`No dependencies to bundle. Skipping.\n\n\n`) + processing.resolve() + return } - } else { - logger.info(colors.green(`Optimizing dependencies:\n ${depsString}`)) - } - // esbuild generates nested directory output with lowest common ancestor base - // this is unpredictable and makes it difficult to analyze entry / output - // mapping. So what we do here is: - // 1. flatten all ids to eliminate slash - // 2. in the plugin, read the entry ourselves as virtual files to retain the - // path. - const flatIdDeps: Record<string, string> = {} - const idToExports: Record<string, ExportsData> = {} - const flatIdToExports: Record<string, ExportsData> = {} - - const { plugins = [], ...esbuildOptions } = - config.optimizeDeps?.esbuildOptions ?? {} - - await init - for (const id in deps) { - const flatId = flattenId(id) - const filePath = (flatIdDeps[flatId] = deps[id]) - let exportsData: ExportsData - if (config.optimizeDeps.extensions?.some((ext) => filePath.endsWith(ext))) { - // For custom supported extensions, build the entry file to transform it into JS, - // and then parse with es-module-lexer. Note that the `bundle` option is not `true`, - // so only the entry file is being transformed. - const result = await build({ - ...esbuildOptions, - plugins, - entryPoints: [filePath], - write: false, - format: 'esm' - }) - exportsData = parse(result.outputFiles[0].text) as ExportsData - } else { - const entryContent = fs.readFileSync(filePath, 'utf-8') - try { - exportsData = parse(entryContent) as ExportsData - } catch { - debug( - `Unable to parse dependency: ${id}. Trying again with a JSX transform.` ) - const transformed = await transformWithEsbuild(entryContent, filePath, { - loader: 'jsx' + const total = qualifiedIds.length + const maxListed = 5 + const listed = Math.min(total, maxListed) + const extra = Math.max(0, total - maxListed) + const depsString = colors.yellow( + qualifiedIds.slice(0, listed).join(`\n `) + + (extra > 0 ? `\n (...and ${extra} more)` : ``) + ) + if (!asCommand) { + if (!newDeps) { + // This is auto run on server start - let the user know that we are + // pre-optimizing deps + logger.info(colors.green(`Pre-bundling dependencies:\n ${depsString}`)) + logger.info( + `(this will be run only when your dependencies or config have changed)` ) + } + } else { + logger.info(colors.green(`Optimizing dependencies:\n ${depsString}`)) + } + + // esbuild generates nested directory output with lowest common ancestor base + // this is unpredictable and makes it difficult to analyze entry / output + // mapping. So what we do here is: + // 1. flatten all ids to eliminate slash + // 2. in the plugin, read the entry ourselves as virtual files to retain the + // path. + const flatIdDeps: Record<string, string> = {} + const idToExports: Record<string, ExportsData> = {} + const flatIdToExports: Record<string, ExportsData> = {} + + const { plugins = [], ...esbuildOptions } = + config.optimizeDeps?.esbuildOptions ?? {} + + await init + for (const id in deps) { + const flatId = flattenId(id) + const filePath = (flatIdDeps[flatId] = deps[id]) + let exportsData: ExportsData + if ( + config.optimizeDeps.extensions?.some((ext) => filePath.endsWith(ext)) + ) { + // For custom supported extensions, build the entry file to transform it into JS, + // and then parse with es-module-lexer. Note that the `bundle` option is not `true`, + // so only the entry file is being transformed. + const result = await build({ + ...esbuildOptions, + plugins, + entryPoints: [filePath], + write: false, + format: 'esm' }) + exportsData = parse(result.outputFiles[0].text) as ExportsData + } else { + const entryContent = fs.readFileSync(filePath, 'utf-8') + try { + exportsData = parse(entryContent) as ExportsData + } catch { + debug( + `Unable to parse dependency: ${id}. Trying again with a JSX transform.` ) + const transformed = await transformWithEsbuild( + entryContent, + filePath, + { + loader: 'jsx' + } + ) - // Ensure that optimization won't fail by defaulting '.js' to the JSX parser. - // This is useful for packages such as Gatsby. - esbuildOptions.loader = { - '.js': 'jsx', - ...esbuildOptions.loader + // Ensure that optimization won't fail by defaulting '.js' to the JSX parser. + // This is useful for packages such as Gatsby. + esbuildOptions.loader = { + '.js': 'jsx', + ...esbuildOptions.loader + } + exportsData = parse(transformed.code) as ExportsData } - exportsData = parse(transformed.code) as ExportsData - } - for (const { ss, se } of exportsData[0]) { - const exp = entryContent.slice(ss, se) - if (/export\s+\*\s+from/.test(exp)) { - exportsData.hasReExports = true + for (const { ss, se } of exportsData[0]) { + const exp = entryContent.slice(ss, se) + if (/export\s+\*\s+from/.test(exp)) { + exportsData.hasReExports = true + } } } - idToExports[id] = exportsData - flatIdToExports[flatId] = exportsData - } + idToExports[id] = exportsData + flatIdToExports[flatId] = exportsData + }
- const define: Record<string, string> = { - 'process.env.NODE_ENV': JSON.stringify(config.mode) - } - for (const key in config.define) { - const value = config.define[key] - define[key] = typeof value === 'string' ? value : JSON.stringify(value) - } + const define: Record<string, string> = { + 'process.env.NODE_ENV': JSON.stringify(config.mode) + } + for (const key in config.define) { + const value = config.define[key] + define[key] = typeof value === 'string' ? value : JSON.stringify(value) + } + + const start = performance.now() + + const result = await build({ + absWorkingDir: process.cwd(), + entryPoints: Object.keys(flatIdDeps), + bundle: true, + format: 'esm', + target: config.build.target || undefined, + external: config.optimizeDeps?.exclude, + logLevel: 'error', + splitting: true, + sourcemap: true, + outdir: processingCacheDir, + ignoreAnnotations: true, + metafile: true, + define, + plugins: [ + ...plugins, + esbuildDepPlugin(flatIdDeps, flatIdToExports, config, ssr) + ], + ...esbuildOptions + }) + + const meta = result.metafile! + + // the paths in `meta.outputs` are relative to `process.cwd()` + const processingCacheDirOutputPath = path.relative( process.cwd(), processingCacheDir ) - const start = performance.now() - - const result = await build({ - absWorkingDir: process.cwd(), - entryPoints: Object.keys(flatIdDeps), - bundle: true, - format: 'esm', - target: config.build.target || undefined, - external: config.optimizeDeps?.exclude, - logLevel: 'error', - splitting: true, - sourcemap: true, - outdir: cacheDir, - ignoreAnnotations: true, - metafile: true, - define, - plugins: [ - ...plugins, - esbuildDepPlugin(flatIdDeps, flatIdToExports, config, ssr) - ], - ...esbuildOptions - }) - - const meta = result.metafile! - - // the paths in `meta.outputs` are relative to `process.cwd()` - const cacheDirOutputPath = path.relative(process.cwd(), cacheDir) - - for (const id in deps) { - const entry = deps[id] - data.optimized[id] = { - file: normalizePath(path.resolve(cacheDir, flattenId(id) + '.js')), - src: entry, - needsInterop: needsInterop( id, idToExports[id], meta.outputs, - cacheDirOutputPath + for (const id in deps) { + const optimizedInfo = metadata.optimized[id] + optimizedInfo.needsInterop = needsInterop( id, idToExports[id], meta.outputs, processingCacheDirOutputPath ) + const output = + meta.outputs[path.relative(process.cwd(), optimizedInfo.file)] + if (output) { + // We only need to hash output.imports to check for stability, but adding the hash + // and file path gives us a unique hash that may be useful for other things in the future + optimizedInfo.fileHash = getHash( + metadata.hash + optimizedInfo.file + JSON.stringify(output.imports) + ) + } + }
+ + // This only runs when missing deps are processed. Previous optimized deps are stable if + // the newly discovered deps don't have common chunks with them. Comparing their fileHash we + // can find out if it is safe to keep the current browser state. If one of the file hashes + // changed, a full page reload is needed + let alteredFiles = false + if (currentData) { + alteredFiles = Object.keys(currentData.optimized).some((dep) => { + const currentInfo = currentData.optimized[dep] + const info = metadata.optimized[dep] + return ( + !info?.fileHash || + !currentInfo?.fileHash || + info?.fileHash !== currentInfo?.fileHash + ) + }) + debug(`optimized deps have altered files: ${alteredFiles}`) }
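The `fileHash` deliberately hashes only `output.imports` from the esbuild metafile: if a dependency's set of chunk imports is unchanged, its hash stays stable across re-runs and the browser state can be kept. A sketch with an assumed metafile entry (see esbuild's `Metafile` type for the real shape):

```ts
import { createHash } from 'crypto'

const getHash = (text: string): string =>
  createHash('sha256').update(text).digest('hex').substring(0, 8)

// Assumed shape of one entry in esbuild's metafile outputs
const output = {
  imports: [
    {
      path: 'node_modules/.vite/processing/chunk-ABCD1234.js',
      kind: 'import-statement'
    }
  ]
}

// Same main hash + same file path + same chunk imports => same fileHash,
// so modules already loaded by the browser remain valid.
const fileHash = getHash(
  'main-hash' + '/deps/vue.js' + JSON.stringify(output.imports)
)
console.log(fileHash)
```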
+ + if (alteredFiles) { + // Overwrite individual hashes with the new global browserHash, a full page reload is required. + // New deps that ended up with a different hash than the one used during import analysis are going to + // get a not-found response so the browser doesn't cache them, and they will properly get loaded after the reload + for (const id in deps) { + metadata.optimized[id].browserHash = metadata.browserHash + } + } + + // Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync + commitProcessingDepsCacheSync() + + debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`) + processing.resolve({ alteredFiles }) + return processing.promise + } + + function commitProcessingDepsCacheSync() { + // Rewire the file paths from the temporary processing dir to the final deps cache dir + const dataPath = path.join(processingCacheDir, '_metadata.json') + writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata)) + // Processing is done, we can now replace the depsCacheDir with processingCacheDir + if (fs.existsSync(depsCacheDir)) { + const rmSync = fs.rmSync ?? fs.rmdirSync // TODO: Remove after support for Node 12 is dropped + rmSync(depsCacheDir, { recursive: true }) + } + fs.renameSync(processingCacheDir, depsCacheDir) + } +} + +export function newDepOptimizationProcessing(): DepOptimizationProcessing { + let resolve: (result?: DepOptimizationResult) => void + const promise = new Promise((_resolve) => { + resolve = _resolve + }) as Promise<DepOptimizationResult | undefined> + return { promise, resolve: resolve! } +} + +// Convert to { id: src } +export function depsFromOptimizedDepInfo( + depsInfo: Record<string, OptimizedDepInfo> +) { + return Object.fromEntries( + Object.entries(depsInfo).map((d) => [d[0], d[1].src]) + ) +} + +function getHash(text: string) { + return createHash('sha256').update(text).digest('hex').substring(0, 8) +} + +export function getOptimizedBrowserHash( + hash: string, + deps: Record<string, string>, + missing?: Record<string, string> +) { + // update browser hash + return getHash( + hash + JSON.stringify(deps) + (missing ? JSON.stringify(missing) : '') + ) +} + +function getCachedDepFilePath(id: string, depsCacheDir: string) { + return normalizePath(path.resolve(depsCacheDir, flattenId(id) + '.js')) +} + +export function getOptimizedDepPath(id: string, config: ResolvedConfig) { + return getCachedDepFilePath(id, getDepsCacheDir(config)) +} + +export function getDepsCacheDir(config: ResolvedConfig) { + return normalizePath(path.resolve(config.cacheDir, 'deps')) +} + +export function getProcessingDepsCacheDir(config: ResolvedConfig) { + return normalizePath(path.resolve(config.cacheDir, 'processing')) +} + +export function isOptimizedDepFile(id: string, config: ResolvedConfig) { + return id.startsWith(getDepsCacheDir(config)) +} + +export function createIsOptimizedDepUrl(config: ResolvedConfig) { + const { root } = config + const depsCacheDir = getDepsCacheDir(config) + + // determine the url prefix of files inside cache directory + const depsCacheDirRelative = normalizePath(path.relative(root, depsCacheDir)) + const depsCacheDirPrefix = depsCacheDirRelative.startsWith('../') + ? // if the cache directory is outside root, the url prefix would be something + // like '/@fs/absolute/path/to/node_modules/.vite' + `/@fs/${normalizePath(depsCacheDir).replace(/^\//, '')}` + : // if the cache directory is inside root, the url prefix would be something + // like '/node_modules/.vite' + `/${depsCacheDirRelative}` + + return function isOptimizedDepUrl(url: string): boolean { + return url.startsWith(depsCacheDirPrefix) } +} + +function parseOptimizedDepsMetadata( + jsonMetadata: string, + depsCacheDir: string, + processing: Promise<DepOptimizationResult | undefined> +) { + const metadata = JSON.parse(jsonMetadata) + for (const o of Object.keys(metadata.optimized)) { + metadata.optimized[o].processing = processing + } + return { ...metadata, discovered: {}, processing } +} - writeFile(dataPath, JSON.stringify(data, null, 2)) +function stringifyOptimizedDepsMetadata(metadata: DepOptimizationMetadata) { + return JSON.stringify( + metadata, + (key: string, value: any) => { + if (key === 'processing' || key === 'discovered') return - debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`) - return data + return value + }, + 2 + ) } // https://github.com/vitejs/vite/issues/1724#issuecomment-767619642
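`stringifyOptimizedDepsMetadata` and `parseOptimizedDepsMetadata` round-trip `_metadata.json` while keeping the non-serializable `processing` promise (and the volatile `discovered` map) out of the file. The round trip, condensed:

```ts
// Persist: the replacer drops `processing` and `discovered` by returning
// undefined for those keys.
const persisted = JSON.stringify(
  {
    hash: 'abc123',
    optimized: { vue: { file: '/deps/vue.js', src: '/node_modules/vue/index.js' } },
    discovered: {},
    processing: Promise.resolve()
  },
  (key, value) =>
    key === 'processing' || key === 'discovered' ? undefined : value,
  2
)

// Revive on startup: re-attach a live promise to the metadata and to every
// optimized dep entry, as parseOptimizedDepsMetadata does above.
const metadata = JSON.parse(persisted)
const processing = Promise.resolve(undefined)
for (const dep of Object.keys(metadata.optimized)) {
  metadata.optimized[dep].processing = processing
}
console.log({ ...metadata, discovered: {}, processing })
```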
diff --git a/packages/vite/src/node/optimizer/registerMissing.ts b/packages/vite/src/node/optimizer/registerMissing.ts index 2d0c8b98a99a20..cdc9e8006c1f52 100644 --- a/packages/vite/src/node/optimizer/registerMissing.ts +++ b/packages/vite/src/node/optimizer/registerMissing.ts @@ -1,5 +1,16 @@ import colors from 'picocolors' -import { optimizeDeps } from '.' +import { + createOptimizeDepsRun, + getOptimizedDepPath, + getOptimizedBrowserHash, + depsFromOptimizedDepInfo, + newDepOptimizationProcessing +} from '.' +import type { + DepOptimizationMetadata, + DepOptimizationResult, + OptimizedDepInfo +} from '.' import type { ViteDevServer } from '..' import { resolveSSRExternal } from '../ssr/ssrExternal' @@ -11,21 +22,53 @@ const debounceMs = 100 export function createMissingImporterRegisterFn( server: ViteDevServer -): (id: string, resolved: string, ssr?: boolean) => void { +): (id: string, resolved: string, ssr?: boolean) => OptimizedDepInfo { const { logger } = server.config - let knownOptimized = server._optimizeDepsMetadata!.optimized - let currentMissing: Record<string, string> = {} + let metadata = server._optimizeDepsMetadata! let handle: NodeJS.Timeout | undefined + let needFullReload: boolean = false + + let depOptimizationProcessing = newDepOptimizationProcessing() - let pendingResolve: (() => void) | null = null + let lastDepOptimizationPromise = metadata.processing async function rerun(ssr: boolean | undefined) { - const newDeps = currentMissing - currentMissing = {} + // debounce to wait until new missing deps are discovered, then issue a new + // optimization of deps (both old and newly found) once the previous + // optimizeDeps processing is finished + + // A successful completion of the optimizeDeps rerun will end up + // creating a new bundled version of all current and discovered deps + // in the cache dir and a new metadata info object assigned + // to server._optimizeDepsMetadata. A fullReload is only issued if + // the previous bundled dependencies have changed. + + // if the rerun fails, server._optimizeDepsMetadata remains untouched, + // current discovered deps are cleaned, and a fullReload is issued + + // optimizeDeps runs need to be executed serially. Await until the previous + // rerun is finished here. It could happen that two reruns are queued; + // in that case, we only need to run one of them + const awaitedOptimizeDepsPromise = lastDepOptimizationPromise + + await lastDepOptimizationPromise + + if (awaitedOptimizeDepsPromise !== lastDepOptimizationPromise) { + // There were two or more reruns queued and one of them already + // started. Only let through the first one, and discard the others + return + } + + if (handle) { + // New deps could have been found here, skip this rerun. Once the + // debounce time is over, a new rerun will be issued + return + }
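The two early returns above give one queue slot to the whole rerun pipeline: runs execute serially, and a backlog of queued reruns collapses into a single execution. The same idea in isolation (a sketch; the debounce handle and logging are elided):

```ts
let lastRun: Promise<void> = Promise.resolve()

async function serializedRerun(work: () => Promise<void>): Promise<void> {
  // Snapshot what we are about to await. If `lastRun` was swapped while we
  // waited, a newer rerun already claimed the slot and this one is dropped.
  const awaited = lastRun
  await awaited
  if (awaited !== lastRun) return

  let done!: () => void
  lastRun = new Promise<void>((r) => (done = r))
  try {
    await work()
  } finally {
    done() // unblock whatever queued up behind this run
  }
}
```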
logger.info( colors.yellow( - `new dependencies found: ${Object.keys(newDeps).join( + `new dependencies found: ${Object.keys(metadata.discovered).join( ', ' )}, updating...` ), @@ -34,50 +77,100 @@ } ) - for (const id in knownOptimized) { - newDeps[id] = knownOptimized[id].src - } + // All deps, previously known and newly discovered, are rebundled; + // respect insertion order to keep the metadata file stable + const newDeps = { ...metadata.optimized, ...metadata.discovered } + const thisDepOptimizationProcessing = depOptimizationProcessing - try { - // Nullify previous metadata so that the resolver won't - // resolve to optimized files during the optimizer re-run - server._isRunningOptimizer = true - server._optimizeDepsMetadata = null + // Other reruns will await until this run is finished + lastDepOptimizationPromise = thisDepOptimizationProcessing.promise + + let processingResult: DepOptimizationResult | undefined - const newData = (server._optimizeDepsMetadata = await optimizeDeps( + // Create a new promise for the next rerun, discovered missing + // dependencies will be assigned this promise from this point + depOptimizationProcessing = newDepOptimizationProcessing() + + let newData: DepOptimizationMetadata | null = null + + try { + const optimizeDeps = await createOptimizeDepsRun( server.config, true, false, + metadata, newDeps, ssr - )) - knownOptimized = newData!.optimized + ) + + // We await the optimizeDeps run here; we are only going to use + // the newData if there wasn't an error + newData = optimizeDeps.metadata + processingResult = await optimizeDeps.run() // update ssr externals if (ssr) { server._ssrExternals = resolveSSRExternal( server.config, - Object.keys(knownOptimized) + Object.keys(newData.optimized) ) } - logger.info(colors.green(`✨ dependencies updated, reloading page...`), { - timestamp: true - }) + // While optimizeDeps is running, new missing deps may be discovered, + // in which case they will keep being added to metadata.discovered + for (const o of Object.keys(metadata.discovered)) { + if (!newData.optimized[o] && !newData.discovered[o]) { + newData.discovered[o] = metadata.discovered[o] + delete metadata.discovered[o] + } + } + newData.processing = thisDepOptimizationProcessing.promise + metadata = server._optimizeDepsMetadata = newData + + if (!needFullReload && !processingResult?.alteredFiles) { + logger.info(colors.green(`✨ new dependencies pre-bundled...`), { + timestamp: true + }) + } else { + if (Object.keys(metadata.discovered).length > 0) { + // There are newly discovered deps, and another rerun is about to be + // executed.
Avoid the current full reload, but queue it for the next one + needFullReload = true + logger.info( + colors.green( + `✨ dependencies updated, delaying reload as new dependencies have been found...` + ), + { + timestamp: true + } + ) + } else { + logger.info( + colors.green(`✨ dependencies updated, reloading page...`), + { + timestamp: true + } + ) + fullReload() + } + } } catch (e) { logger.error( colors.red(`error while updating dependencies:\n${e.stack}`), { timestamp: true, error: e } ) + + // Reset missing deps, let the server rediscover the dependencies + metadata.discovered = {} + fullReload() } finally { - server._isRunningOptimizer = false - if (!handle) { - // No other rerun() pending so resolve and let pending requests proceed - pendingResolve && pendingResolve() - server._pendingReload = pendingResolve = null - } + // Rerun finished, resolve the promise to let awaiting requests or + // other queued reruns be processed + thisDepOptimizationProcessing.resolve() } + } function fullReload() { // Cached transform results have stale imports (resolved to // old locations) so they need to be invalidated before the page is // reloaded. @@ -87,25 +180,52 @@ type: 'full-reload', path: '*' }) + + needFullReload = false } return function registerMissingImport( id: string, resolved: string, ssr?: boolean - ) { - if (!knownOptimized[id]) { - currentMissing[id] = resolved - if (handle) clearTimeout(handle) - handle = setTimeout(() => { - handle = undefined - rerun(ssr) - }, debounceMs) - if (!server._pendingReload) { - server._pendingReload = new Promise((r) => { - pendingResolve = r - }) - } + ): OptimizedDepInfo { + const optimized = metadata.optimized[id] + if (optimized) { + return optimized + } + let missing = metadata.discovered[id] + if (missing) { + // We have already discovered this dependency + // It will be processed in the next rerun call + return missing } + missing = metadata.discovered[id] = { + file: getOptimizedDepPath(id, server.config), + src: resolved, + // Assign a browserHash to this missing dependency that is unique to + // the current state of known + missing deps. If its optimizeDeps run + // doesn't alter the bundled files of previously known dependencies, + // we don't need a full reload and this browserHash will be kept + browserHash: getOptimizedBrowserHash( + metadata.hash, + depsFromOptimizedDepInfo(metadata.optimized), + depsFromOptimizedDepInfo(metadata.discovered) + ), + // loading of this pre-bundled dep needs to wait for its processing + // promise to be resolved + processing: depOptimizationProcessing.promise + } + + // Debounced rerun, let other missing dependencies be discovered before + // running the next optimizeDeps + if (handle) clearTimeout(handle) + handle = setTimeout(() => { + handle = undefined + rerun(ssr) + }, debounceMs) + + // Return the path for the optimized bundle, this path is known before + // esbuild is run to generate the pre-bundle + return missing } }
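Discovery and rebundling stay decoupled through the debounce at the end of `registerMissingImport`. Reduced to its core (the 100 ms window matches `debounceMs` above; everything else is illustrative):

```ts
const debounceMs = 100
let handle: NodeJS.Timeout | undefined
const discovered = new Set<string>()

// Every new discovery resets the timer, so a module that imports five
// missing deps triggers one rerun, not five.
function registerMissing(id: string, rerun: () => void): void {
  discovered.add(id)
  if (handle) clearTimeout(handle)
  handle = setTimeout(() => {
    handle = undefined
    console.log(`rerunning optimizeDeps for: ${[...discovered].join(', ')}`)
    rerun()
  }, debounceMs)
}
```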
diff --git a/packages/vite/src/node/plugins/importAnalysis.ts b/packages/vite/src/node/plugins/importAnalysis.ts index 082aa35dfae213..3ff2000b6d3f7d 100644 --- a/packages/vite/src/node/plugins/importAnalysis.ts +++ b/packages/vite/src/node/plugins/importAnalysis.ts @@ -46,6 +46,11 @@ import { makeLegalIdentifier } from '@rollup/pluginutils' import { shouldExternalizeForSSR } from '../ssr/ssrExternal' import { performance } from 'perf_hooks' import { transformRequest } from '../server/transformRequest' +import { + isOptimizedDepFile, + createIsOptimizedDepUrl, + getDepsCacheDir +} from '../optimizer' const isDebug = !!process.env.DEBUG const debug = createDebugger('vite:import-analysis') @@ -55,6 +60,8 @@ const clientDir = normalizePath(CLIENT_DIR) const skipRE = /\.(map|json)$/ const canSkip = (id: string) => skipRE.test(id) || isDirectCSSRequest(id) +const optimizedDepChunkRE = /\/chunk-[A-Z0-9]{8}\.js/ + function isExplicitImportRequired(url: string) { return !isJSRequest(cleanUrl(url)) && !isCSSRequest(url) } @@ -100,12 +107,14 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { const clientPublicPath = path.posix.join(base, CLIENT_PUBLIC_PATH) let server: ViteDevServer + let isOptimizedDepUrl: (url: string) => boolean return { name: 'vite:import-analysis', configureServer(_server) { server = _server + isOptimizedDepUrl = createIsOptimizedDepUrl(server.config) }, async transform(source, importer, options) { @@ -221,8 +230,12 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { if (resolved.id.startsWith(root + '/')) { // in root: infer short absolute path from root url = resolved.id.slice(root.length) - } else if (fs.existsSync(cleanUrl(resolved.id))) { - // exists but out of root: rewrite to absolute /@fs/ paths + } else if ( + resolved.id.startsWith(getDepsCacheDir(config)) || + fs.existsSync(cleanUrl(resolved.id)) + ) { + // an optimized dep may not yet exist in the filesystem, or + // a regular file exists but is out of root: rewrite to absolute /@fs/ paths url = path.posix.join(FS_PREFIX + resolved.id) } else { url = resolved.id } @@ -245,11 +258,16 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { // mark non-js/css imports with `?import` url = markExplicitImport(url) + // If the url isn't a request for a pre-bundled common chunk, // for relative js/css imports, or self-module virtual imports // (e.g. vue blocks), inherit importer's version query // do not do this for unknown type imports, otherwise the appended // query can break 3rd party plugin's extension checks. - if ((isRelative || isSelfImport) && !/[\?&]import=?\b/.test(url)) { + if ( + (isRelative || isSelfImport) && + !(isOptimizedDepUrl(url) && optimizedDepChunkRE.test(url)) && + !/[\?&]import=?\b/.test(url) + ) { const versionMatch = importer.match(DEP_VERSION_RE) if (versionMatch) { url = injectQuery(url, versionMatch[1]) }
@@ -397,35 +415,53 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin { specifier, start ) - let url = normalizedUrl + const url = normalizedUrl // record as safe modules server?.moduleGraph.safeModulesPath.add(fsPathFromUrl(url)) // rewrite if (url !== specifier) { - // for optimized cjs deps, support named imports by rewriting named - // imports to const assignments. - if (resolvedId.endsWith(`&es-interop`)) { - url = url.slice(0, -11) - if (isDynamicImport) { - // rewrite `import('package')` to expose the default directly - str().overwrite( - dynamicIndex, - end + 1, - `import('${url}').then(m => m.default && m.default.__esModule ? m.default : ({ ...m.default, default: m.default }))` - ) - } else { - const exp = source.slice(expStart, expEnd) - const rewritten = transformCjsImport(exp, url, rawUrl, index) - if (rewritten) { - str().overwrite(expStart, expEnd, rewritten) + let rewriteDone = false + if (isOptimizedDepFile(resolvedId, config)) { + // for optimized cjs deps, support named imports by rewriting named + // imports to const assignments. + const optimizeDepsMetadata = server._optimizeDepsMetadata! + const { optimized } = optimizeDepsMetadata + + // The browserHash in resolvedId could be stale, in which case there will be a full + // page reload. We could return a 404 in that case, but it is safe to let the request through + const file = cleanUrl(resolvedId) // Remove ?v={hash} + const dep = Object.keys(optimized).find( + (k) => optimized[k].file === file + ) + + // Wait until the dependency has been pre-bundled + dep && (await optimized[dep].processing) + + if (dep && optimized[dep].needsInterop) { + debug(`${dep} needs interop`) + if (isDynamicImport) { + // rewrite `import('package')` to expose the default directly + str().overwrite( + dynamicIndex, + end + 1, + `import('${url}').then(m => m.default && m.default.__esModule ? m.default : ({ ...m.default, default: m.default }))` + ) } else { - // #1439 export * from '...' - str().overwrite(start, end, url) + const exp = source.slice(expStart, expEnd) + const rewritten = transformCjsImport(exp, url, rawUrl, index) + if (rewritten) { + str().overwrite(expStart, expEnd, rewritten) + } else { + // #1439 export * from '...' + str().overwrite(start, end, url) + } } + rewriteDone = true } - } else { + } + if (!rewriteDone) { str().overwrite(start, end, isDynamicImport ? `'${url}'` : url) } }
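For context on the rewrite path above: when a pre-bundled CJS dep `needsInterop`, named imports cannot be left as-is, so `transformCjsImport` turns them into a default import plus const assignments. A rough, illustrative-only reimplementation of that shape (the real helper also handles namespace imports and deduplicates identifiers):

```ts
// Rewrites `import Name, { a, b } from 'dep'` against an optimized CJS
// bundle URL. Output shape is an approximation of transformCjsImport's.
function rewriteCjsImport(
  url: string,
  defaultName: string,
  named: string[],
  index = 0
): string {
  const cjsId = `__vite__cjsImport${index}`
  const lines = [`import ${cjsId} from '${url}'`]
  lines.push(
    `const ${defaultName} = ${cjsId}.__esModule ? ${cjsId}.default : ${cjsId}`
  )
  for (const name of named) {
    lines.push(`const ${name} = ${cjsId}["${name}"]`)
  }
  return lines.join('\n')
}

console.log(
  rewriteCjsImport('/node_modules/.vite/deps/react.js?v=abc123', 'React', [
    'useState'
  ])
)
```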
diff --git a/packages/vite/src/node/plugins/index.ts b/packages/vite/src/node/plugins/index.ts index 5213714febb12b..825798a73c8493 100644 --- a/packages/vite/src/node/plugins/index.ts +++ b/packages/vite/src/node/plugins/index.ts @@ -3,6 +3,7 @@ import type { Plugin } from '../plugin' import aliasPlugin from '@rollup/plugin-alias' import { jsonPlugin } from './json' import { resolvePlugin } from './resolve' +import { optimizedDepsPlugin } from './optimizedDeps' import { esbuildPlugin } from './esbuild' import { importAnalysisPlugin } from './importAnalysis' import { cssPlugin, cssPostPlugin } from './css' @@ -47,6 +48,7 @@ export async function resolvePlugins( ssrConfig: config.ssr, asSrc: true }), + isBuild ? null : optimizedDepsPlugin(), htmlInlineProxyPlugin(config), cssPlugin(config), config.esbuild !== false ? esbuildPlugin(config.esbuild) : null, diff --git a/packages/vite/src/node/plugins/optimizedDeps.ts b/packages/vite/src/node/plugins/optimizedDeps.ts new file mode 100644 index 00000000000000..c7e03927cf6574 --- /dev/null +++ b/packages/vite/src/node/plugins/optimizedDeps.ts @@ -0,0 +1,117 @@ +import { promises as fs } from 'fs' +import type { Plugin } from '../plugin' +import colors from 'picocolors' +import { DEP_VERSION_RE } from '../constants' +import { cleanUrl, createDebugger } from '../utils' +import { isOptimizedDepFile } from '../optimizer' +import type { DepOptimizationMetadata, OptimizedDepInfo } from '../optimizer' +import type { ViteDevServer } from '..' + +export const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR = + 'ERR_OPTIMIZE_DEPS_PROCESSING_ERROR' +export const ERR_OUTDATED_OPTIMIZED_DEP = 'ERR_OUTDATED_OPTIMIZED_DEP' + +const isDebug = process.env.DEBUG +const debug = createDebugger('vite:optimize-deps') + +export function optimizedDepsPlugin(): Plugin { + let server: ViteDevServer | undefined + + return { + name: 'vite:optimized-deps', + + configureServer(_server) { + server = _server + }, + + async load(id) { + if (server && isOptimizedDepFile(id, server.config)) { + const metadata = server?._optimizeDepsMetadata + if (metadata) { + const file = cleanUrl(id) + const versionMatch = id.match(DEP_VERSION_RE) + const browserHash = versionMatch + ? versionMatch[1].split('=')[1] + : undefined + const info = optimizeDepInfoFromFile(metadata, file) + if (info) { + if (browserHash && info.browserHash !== browserHash) { + throwOutdatedRequest(id) + } + try { + // This is an entry point, it may still not be bundled + await info.processing + } catch { + // If the refresh has not happened after timeout, Vite considers + // something unexpected has happened. In this case, Vite + // returns an empty response that will error. + throwProcessingError(id) + return + } + const newMetadata = server._optimizeDepsMetadata + if (metadata !== newMetadata) { + const currentInfo = optimizeDepInfoFromFile(newMetadata!, file) + if (info.browserHash !== currentInfo?.browserHash) { + throwOutdatedRequest(id) + } + } + } + isDebug && debug(`load ${colors.cyan(file)}`) + // Load the file from the cache instead of waiting for other plugin + // load hooks to avoid race conditions; once processing is resolved, + // we are sure that the file has been properly saved to disk + try { + return await fs.readFile(file, 'utf-8') + } catch (e) { + // Outdated non-entry points (CHUNK), loaded after a rerun + throwOutdatedRequest(id) + } + } + } + } + } +}
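The two `ERR_` constants form a small typed-error protocol between this plugin and the transform middleware: the throw site attaches a machine-readable `code`, and the middleware branches on it rather than on the message. The pattern in isolation:

```ts
const ERR_OUTDATED_OPTIMIZED_DEP = 'ERR_OUTDATED_OPTIMIZED_DEP'

function throwOutdated(id: string): never {
  const err: any = new Error(`pre-bundle for "${id}" is outdated`)
  err.code = ERR_OUTDATED_OPTIMIZED_DEP // what the middleware matches on
  throw err
}

try {
  throwOutdated('vue')
} catch (e: any) {
  if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) {
    // answer with 504 and stay quiet: the browser is about to full-reload
  } else {
    throw e // unrelated failures still propagate
  }
}
```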
+ +function throwProcessingError(id: string) { + const err: any = new Error( + `Something unexpected happened while optimizing "${id}". ` + + `The current page should have reloaded by now` + ) + err.code = ERR_OPTIMIZE_DEPS_PROCESSING_ERROR + // This error will be caught by the transform middleware that will + // send a 504 status code (request timeout) + throw err +} + +function throwOutdatedRequest(id: string) { + const err: any = new Error( + `There is a new version of the pre-bundle for "${id}", ` + + `a page reload is going to ask for it.` + ) + err.code = ERR_OUTDATED_OPTIMIZED_DEP + // This error will be caught by the transform middleware that will + // send a 504 status code (request timeout) + throw err +} + +function optimizeDepInfoFromFile( + metadata: DepOptimizationMetadata, + file: string +): OptimizedDepInfo | undefined { + return ( + findFileInfo(metadata.optimized, file) || + findFileInfo(metadata.discovered, file) + ) +} + +function findFileInfo( + dependenciesInfo: Record<string, OptimizedDepInfo>, + file: string +): OptimizedDepInfo | undefined { + for (const o of Object.keys(dependenciesInfo)) { + const info = dependenciesInfo[o] + if (info.file === file) { + return info + } + } +}
diff --git a/packages/vite/src/node/plugins/resolve.ts b/packages/vite/src/node/plugins/resolve.ts index ce899abbdf1dc5..b5493aa7d2ef83 100644 --- a/packages/vite/src/node/plugins/resolve.ts +++ b/packages/vite/src/node/plugins/resolve.ts @@ -29,6 +29,8 @@ import { isPossibleTsOutput, getPotentialTsSrcPaths } from '../utils' +import { createIsOptimizedDepUrl } from '../optimizer' +import type { OptimizedDepInfo } from '../optimizer' import type { ViteDevServer, SSROptions } from '..' import type { PartialResolvedId } from 'rollup' import { resolve as _resolveExports } from 'resolve.exports' @@ -87,6 +89,7 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin { preferRelative = false } = baseOptions let server: ViteDevServer | undefined + let isOptimizedDepUrl: (url: string) => boolean const { target: ssrTarget, noExternal: ssrNoExternal } = ssrConfig ?? {} @@ -95,6 +98,7 @@ configureServer(_server) { server = _server + isOptimizedDepUrl = createIsOptimizedDepUrl(server.config) }, resolveId(id, importer, resolveOpts) { @@ -123,6 +127,15 @@ let res: string | PartialResolvedId | undefined + // resolve pre-bundled deps requests, these could be resolved by + // tryFileResolve or /fs/ resolution but these files may not yet + // exist if we are in the middle of a deps re-processing + if (asSrc && isOptimizedDepUrl?.(id)) { + return id.startsWith(FS_PREFIX) + ? fsPathFromId(id) + : normalizePath(ensureVolumeInPath(path.resolve(root, id.slice(1)))) + } + // explicit fs paths that starts with /@fs/* if (asSrc && id.startsWith(FS_PREFIX)) { const fsPath = fsPathFromId(id) @@ -572,8 +585,7 @@ export function tryNodeResolve( if ( !resolved.includes('node_modules') || // linked !server || // build - server._isRunningOptimizer || // optimizing - !server._optimizeDepsMetadata + !server._registerMissingImport // initial esbuild scan phase ) { return { id: resolved } } @@ -593,19 +605,24 @@ // can cache it without re-validation, but only do so for known js types. // otherwise we may introduce duplicated modules for externalized files // from pre-bundled deps. - const versionHash = server._optimizeDepsMetadata?.browserHash + + const versionHash = server._optimizeDepsMetadata?.hash if (versionHash && isJsType) { resolved = injectQuery(resolved, `v=${versionHash}`) } } else { - // this is a missing import. - // queue optimize-deps re-run. - server._registerMissingImport?.(id, resolved, ssr) + // this is a missing import, queue optimize-deps re-run and + // get its resolved optimized info + const optimizedInfo = server._registerMissingImport!(id, resolved, ssr) + resolved = getOptimizedUrl(optimizedInfo) } - return { id: resolved } + return { id: resolved! } } } +const getOptimizedUrl = (optimizedData: OptimizedDepInfo) => + `${optimizedData.file}?v=${optimizedData.browserHash}` + export function tryOptimizedResolve( id: string, server: ViteDevServer, @@ -615,15 +632,6 @@ if (!depData) return - const getOptimizedUrl = (optimizedData: typeof depData.optimized[string]) => { - return ( - optimizedData.file + - `?v=${depData.browserHash}${ - optimizedData.needsInterop ? `&es-interop` : `` - }` - ) - } - // check if id has been optimized const isOptimized = depData.optimized[id] if (isOptimized) {
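`getOptimizedUrl` keys the browser cache on `browserHash` through the `?v=` query, which is what lets the dev server mark pre-bundles as strongly cacheable. A simplified sketch (Vite's real `injectQuery` also preserves existing query strings):

```ts
interface OptimizedDepInfoLike {
  file: string
  browserHash?: string
}

const getOptimizedUrl = (info: OptimizedDepInfoLike): string =>
  `${info.file}?v=${info.browserHash}`

// Re-optimized deps get a new hash and therefore a brand new URL, so a
// strongly cached old response can never be served for them.
console.log(getOptimizedUrl({ file: '/deps/vue.js', browserHash: 'abc123' }))
console.log(getOptimizedUrl({ file: '/deps/vue.js', browserHash: 'def456' }))
```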
diff --git a/packages/vite/src/node/server/index.ts b/packages/vite/src/node/server/index.ts index bd3161717405df..9dba9f8ebb92d9 100644 --- a/packages/vite/src/node/server/index.ts +++ b/packages/vite/src/node/server/index.ts @@ -44,8 +44,8 @@ import { transformRequest } from './transformRequest' import type { ESBuildTransformResult } from '../plugins/esbuild' import { transformWithEsbuild } from '../plugins/esbuild' import type { TransformOptions as EsbuildTransformOptions } from 'esbuild' -import type { DepOptimizationMetadata } from '../optimizer' -import { optimizeDeps } from '../optimizer' +import type { DepOptimizationMetadata, OptimizedDepInfo } from '../optimizer' +import { createOptimizeDepsRun } from '../optimizer' import { ssrLoadModule } from '../ssr/ssrModuleLoader' import { resolveSSRExternal } from '../ssr/ssrExternal' import { @@ -280,20 +280,16 @@ export interface ViteDevServer { * @internal */ _forceOptimizeOnRestart: boolean - /** - * @internal - */ - _isRunningOptimizer: boolean /** * @internal */ _registerMissingImport: - | ((id: string, resolved: string, ssr: boolean | undefined) => void) + | (( + id: string, + resolved: string, + ssr: boolean | undefined + ) => OptimizedDepInfo) | null - /** - * @internal - */ - _pendingReload: Promise<void> | null /** * @internal */ @@ -364,12 +360,17 @@ export async function createServer( return transformRequest(url, server, options) }, transformIndexHtml: null!, // to be immediately set - ssrLoadModule(url, opts?: { fixStacktrace?: boolean }) { + async ssrLoadModule(url, opts?: { fixStacktrace?: boolean }) { + let configFileDependencies: string[] = [] + const metadata = server._optimizeDepsMetadata + if (metadata) { + await metadata.processing + configFileDependencies = Object.keys(metadata.optimized) + } + server._ssrExternals ||= resolveSSRExternal( config, - server._optimizeDepsMetadata ? Object.keys(server._optimizeDepsMetadata.optimized) - : [] + configFileDependencies ) return ssrLoadModule( url, @@ -425,9 +426,7 @@ _globImporters: Object.create(null), _restartPromise: null, _forceOptimizeOnRestart: false, - _isRunningOptimizer: false, _registerMissingImport: null, - _pendingReload: null, _pendingRequests: new Map() } @@ -570,15 +569,19 @@ middlewares.use(errorMiddleware(server, !!middlewareMode)) const runOptimize = async () => { - server._isRunningOptimizer = true - try { - server._optimizeDepsMetadata = await optimizeDeps( - config, - config.server.force - ) - } finally { - server._isRunningOptimizer = false - } + const optimizeDeps = await createOptimizeDepsRun( + config, + config.server.force + ) + + // Don't wait for the optimization to finish, we can start the + // server right away here + server._optimizeDepsMetadata = optimizeDeps.metadata + optimizeDeps.run() + + // While running the first optimizeDeps, _registerMissingImport is null + // so the resolve plugin resolves straight to node_modules during the + // deps discovery scan phase server._registerMissingImport = createMissingImporterRegisterFn(server) }
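`runOptimize` above is the heart of the change: `metadata` is usable synchronously while `run()` is deliberately left un-awaited. A sketch of what consuming that two-step API looks like on startup — the import path and server wiring are illustrative, this is internal rather than public Vite API:

```ts
import { createOptimizeDepsRun } from './optimizer'
import type { ResolvedConfig } from './config'

async function startServer(
  config: ResolvedConfig,
  listen: () => Promise<void>
): Promise<void> {
  const optimizeDeps = await createOptimizeDepsRun(config, config.server.force)

  // metadata (with pre-assigned file paths) is available immediately
  const metadata = optimizeDeps.metadata

  // deliberately not awaited: esbuild keeps bundling in the background and
  // requests that need a bundle await metadata.processing instead
  optimizeDeps.run()

  await listen() // requests are accepted before pre-bundling has finished
  console.log('started with deps hash', metadata.hash)
}
```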
diff --git a/packages/vite/src/node/server/middlewares/transform.ts b/packages/vite/src/node/server/middlewares/transform.ts index 15f2355e0e389c..ae7bec6e185113 100644 --- a/packages/vite/src/node/server/middlewares/transform.ts +++ b/packages/vite/src/node/server/middlewares/transform.ts @@ -1,3 +1,4 @@ +import { promises as fs } from 'fs' import path from 'path' import type { ViteDevServer } from '..' import type { Connect } from 'types/connect' @@ -11,28 +12,29 @@ import { prettifyUrl, removeImportQuery, removeTimestampQuery, - unwrapId + unwrapId, + fsPathFromId, + ensureVolumeInPath } from '../../utils' import { send } from '../send' import { transformRequest } from '../transformRequest' import { isHTMLProxy } from '../../plugins/html' import colors from 'picocolors' import { - CLIENT_PUBLIC_PATH, DEP_VERSION_RE, - NULL_BYTE_PLACEHOLDER + NULL_BYTE_PLACEHOLDER, + FS_PREFIX } from '../../constants' import { isCSSRequest, isDirectCSSRequest, isDirectRequest } from '../../plugins/css' - -/** - * Time (ms) Vite has to full-reload the page before returning - * an empty response. - */ -const NEW_DEPENDENCY_BUILD_TIMEOUT = 1000 +import { + ERR_OPTIMIZE_DEPS_PROCESSING_ERROR, + ERR_OUTDATED_OPTIMIZED_DEP +} from '../../plugins/optimizedDeps' +import { createIsOptimizedDepUrl } from '../../optimizer' const debugCache = createDebugger('vite:cache') const isDebug = !!process.env.DEBUG @@ -43,19 +45,11 @@ export function transformMiddleware( server: ViteDevServer ): Connect.NextHandleFunction { const { - config: { root, logger, cacheDir }, + config: { root, logger }, moduleGraph } = server - // determine the url prefix of files inside cache directory - const cacheDirRelative = normalizePath(path.relative(root, cacheDir)) - const cacheDirPrefix = cacheDirRelative.startsWith('../') - ? // if the cache directory is outside root, the url prefix would be something - // like '/@fs/absolute/path/to/node_modules/.vite' - `/@fs/${normalizePath(cacheDir).replace(/^\//, '')}` - : // if the cache directory is inside root, the url prefix would be something - // like '/node_modules/.vite' - `/${cacheDirRelative}` + const isOptimizedDepUrl = createIsOptimizedDepUrl(server.config) // Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...` return async function viteTransformMiddleware(req, res, next) { if (req.method !== 'GET' || knownIgnoreList.has(req.url!)) { return next() } - if ( - server._pendingReload && - // always allow vite client requests so that it can trigger page reload - !req.url?.startsWith(CLIENT_PUBLIC_PATH) && - !req.url?.includes('vite/dist/client') - ) { - try { - // missing dep pending reload, hold request until reload happens - await Promise.race([ - server._pendingReload, - // If the refresh has not happened after timeout, Vite considers - // something unexpected has happened. In this case, Vite - // returns an empty response that will error. - new Promise((_, reject) => - setTimeout(reject, NEW_DEPENDENCY_BUILD_TIMEOUT) - ) - ]) - } catch { - // Don't do anything if response has already been sent - if (!res.writableEnded) { - // status code request timeout - res.statusCode = 408 - res.end( - `
[vite] Something unexpected happened while optimizing "${req.url}"\n` + - `The current page should have reloaded by now
` - ) - } - return - } - } let url: string try { url = decodeURI(removeTimestampQuery(req.url!)).replace( @@ -109,15 +73,47 @@ export function transformMiddleware( const isSourceMap = withoutQuery.endsWith('.map') // since we generate source map references, handle those requests here if (isSourceMap) { - const originalUrl = url.replace(/\.map($|\?)/, '$1') - const map = (await moduleGraph.getModuleByUrl(originalUrl, false)) - ?.transformResult?.map - if (map) { - return send(req, res, JSON.stringify(map), 'json', { - headers: server.config.server.headers - }) + if (isOptimizedDepUrl(url)) { + // If the browser is requesting a source map for an optimized dep, it + // means that the dependency has already been pre-bundled and loaded + const mapFile = url.startsWith(FS_PREFIX) + ? fsPathFromId(url) + : normalizePath( + ensureVolumeInPath(path.resolve(root, url.slice(1))) + ) + try { + const map = await fs.readFile(mapFile, 'utf-8') + return send(req, res, map, 'json', { + headers: server.config.server.headers + }) + } catch (e) { + // Outdated source map request for optimized deps, this isn't an error + // but part of the normal flow when re-optimizing after missing deps + // Send back an empty source map so the browser doesn't issue warnings + const dummySourceMap = { + version: 3, + file: mapFile.replace(/\.map$/, ''), + sources: [], + sourcesContent: [], + names: [], + mappings: ';;;;;;;;;' + } + return send(req, res, JSON.stringify(dummySourceMap), 'json', { + cacheControl: 'no-cache', + headers: server.config.server.headers + }) + } } else { - return next() + const originalUrl = url.replace(/\.map($|\?)/, '$1') + const map = (await moduleGraph.getModuleByUrl(originalUrl, false)) + ?.transformResult?.map + if (map) { + return send(req, res, JSON.stringify(map), 'json', { + headers: server.config.server.headers + }) + } else { + return next() + } } } @@ -179,9 +175,7 @@ export function transformMiddleware( }) if (result) { const type = isDirectCSSRequest(url) ? 'css' : 'js' - const isDep = - DEP_VERSION_RE.test(url) || - (cacheDirPrefix && url.startsWith(cacheDirPrefix)) + const isDep = DEP_VERSION_RE.test(url) || isOptimizedDepUrl(url) return send(req, res, result.code, type, { etag: result.etag, // allow browser to cache npm deps! @@ -192,6 +186,30 @@ export function transformMiddleware( } } } catch (e) { + if (e?.code === ERR_OPTIMIZE_DEPS_PROCESSING_ERROR) { + if (!res.writableEnded) { + // Don't do anything if response has already been sent + res.statusCode = 504 // status code request timeout + res.end() + } + // This timeout is unexpected + logger.error(e.message) + return + } + if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) { + if (!res.writableEnded) { + // Don't do anything if response has already been sent + res.statusCode = 504 // status code request timeout + res.end() + } + // We don't need to log an error in this case, the request + // is outdated because new dependencies were discovered and + // the new pre-bundle dependendencies have changed. + // A full-page reload has been issued, and these old requests + // can't be properly fullfilled. This isn't an unexpected + // error but a normal part of the missing deps discovery flow + return + } return next(e) }