Skip to content

Commit

Permalink
fix: browser cache of newly discovered deps (#7378)
Browse files Browse the repository at this point in the history
  • Loading branch information
patak-dev committed Mar 19, 2022
1 parent f448593 commit 392a0de
Show file tree
Hide file tree
Showing 4 changed files with 148 additions and 26 deletions.
115 changes: 94 additions & 21 deletions packages/vite/src/node/optimizer/index.ts
Expand Up @@ -24,6 +24,9 @@ import { performance } from 'perf_hooks'
const debug = createDebugger('vite:deps')
const isDebugEnabled = _debug('vite:deps').enabled

const jsExtensionRE = /\.js$/i
const jsMapExtensionRE = /\.js\.map$/i

export type ExportsData = ReturnType<typeof parse> & {
// es-module-lexer has a facade detection but isn't always accurate for our
// use case when the module has default export
Expand Down Expand Up @@ -125,7 +128,7 @@ export interface OptimizedDepInfo {
* During optimization, ids can still be resolved to their final location
* but the bundles may not yet be saved to disk
*/
processing: Promise<void>
processing?: Promise<void>
}

export interface DepOptimizationMetadata {
Expand All @@ -144,6 +147,10 @@ export interface DepOptimizationMetadata {
* Metadata for each already optimized dependency
*/
optimized: Record<string, OptimizedDepInfo>
/**
* Metadata for non-entry optimized chunks and dynamic imports
*/
chunks: Record<string, OptimizedDepInfo>
/**
* Metadata for each newly discovered dependency after processing
*/
Expand Down Expand Up @@ -213,6 +220,7 @@ export async function createOptimizeDepsRun(
hash: mainHash,
browserHash: mainHash,
optimized: {},
chunks: {},
discovered: {}
}

Expand All @@ -222,8 +230,7 @@ export async function createOptimizeDepsRun(
const prevDataPath = path.join(depsCacheDir, '_metadata.json')
prevData = parseOptimizedDepsMetadata(
fs.readFileSync(prevDataPath, 'utf-8'),
depsCacheDir,
processing.promise
depsCacheDir
)
} catch (e) {}
// hash is consistent, no need to re-bundle
Expand Down Expand Up @@ -490,7 +497,9 @@ export async function createOptimizeDepsRun(
processingCacheDirOutputPath
)
const output =
meta.outputs[path.relative(process.cwd(), optimizedInfo.file)]
meta.outputs[
path.relative(process.cwd(), getProcessingDepPath(id, config))
]
if (output) {
// We only need to hash the output.imports in to check for stability, but adding the hash
// and file path gives us a unique hash that may be useful for other things in the future
Expand Down Expand Up @@ -518,6 +527,25 @@ export async function createOptimizeDepsRun(
debug(`optimized deps have altered files: ${alteredFiles}`)
}

for (const o of Object.keys(meta.outputs)) {
if (!o.match(jsMapExtensionRE)) {
const id = path
.relative(processingCacheDirOutputPath, o)
.replace(jsExtensionRE, '')
const file = getOptimizedDepPath(id, config)
if (!findFileInfo(metadata.optimized, file)) {
metadata.chunks[id] = {
file,
src: '',
needsInterop: false,
browserHash:
(!alteredFiles && currentData?.chunks[id]?.browserHash) ||
newBrowserHash
}
}
}
}

if (alteredFiles) {
metadata.browserHash = newBrowserHash
}
Expand Down Expand Up @@ -615,19 +643,12 @@ export function depsFromOptimizedDepInfo(
)
}

function getHash(text: string) {
export function getHash(text: string) {
return createHash('sha256').update(text).digest('hex').substring(0, 8)
}

export function getOptimizedBrowserHash(
hash: string,
deps: Record<string, string>,
missing?: Record<string, string>
) {
// update browser hash
return getHash(
hash + JSON.stringify(deps) + (missing ? JSON.stringify(missing) : '')
)
/**
 * Browser hash for long-term caching of optimized deps: combines the
 * main metadata hash with the current set of optimized dependencies.
 */
function getOptimizedBrowserHash(hash: string, deps: Record<string, string>) {
  const serializedDeps = JSON.stringify(deps)
  return getHash(hash + serializedDeps)
}

function getCachedDepFilePath(id: string, depsCacheDir: string) {
Expand All @@ -642,7 +663,15 @@ export function getDepsCacheDir(config: ResolvedConfig) {
return normalizePath(path.resolve(config.cacheDir, 'deps'))
}

export function getProcessingDepsCacheDir(config: ResolvedConfig) {
// Path of a dep's bundled output file inside the given processing cache dir
function getProcessingDepFilePath(id: string, processingCacheDir: string) {
  const fileName = `${flattenId(id)}.js`
  return normalizePath(path.resolve(processingCacheDir, fileName))
}

function getProcessingDepPath(id: string, config: ResolvedConfig) {
return getProcessingDepFilePath(id, getProcessingDepsCacheDir(config))
}

// Temporary dir where deps are bundled before being committed to the
// final deps cache dir
function getProcessingDepsCacheDir(config: ResolvedConfig) {
  const dir = path.resolve(config.cacheDir, 'processing')
  return normalizePath(dir)
}

Expand Down Expand Up @@ -671,8 +700,7 @@ export function createIsOptimizedDepUrl(config: ResolvedConfig) {

function parseOptimizedDepsMetadata(
jsonMetadata: string,
depsCacheDir: string,
processing: Promise<void>
depsCacheDir: string
) {
const metadata = JSON.parse(jsonMetadata, (key: string, value: string) => {
// Paths can be absolute or relative to the deps cache dir where
Expand All @@ -682,25 +710,69 @@ function parseOptimizedDepsMetadata(
}
return value
})
const { browserHash } = metadata
for (const o of Object.keys(metadata.optimized)) {
metadata.optimized[o].processing = processing
const depInfo = metadata.optimized[o]
depInfo.browserHash = browserHash
}
metadata.chunks ||= {} // Support missing chunks for back compat
for (const o of Object.keys(metadata.chunks)) {
const depInfo = metadata.chunks[o]
depInfo.src = ''
depInfo.browserHash = browserHash
}
return { ...metadata, discovered: {} }
metadata.discovered = {}
return metadata
}

/**
 * Stringify metadata for the deps cache. Removes processing promises
 * and each individual dep info's browserHash. Once the cache is reloaded
 * the next time the server starts, we need to use the global
 * browserHash to allow long term caching
 */
function stringifyOptimizedDepsMetadata(
metadata: DepOptimizationMetadata,
depsCacheDir: string
) {
return JSON.stringify(
metadata,
(key: string, value: any) => {
if (key === 'processing' || key === 'discovered') {
if (key === 'discovered' || key === 'processing') {
return
}
if (key === 'file' || key === 'src') {
return normalizePath(path.relative(depsCacheDir, value))
}
if (key === 'optimized') {
// Only remove browserHash for individual dep info
const cleaned: Record<string, object> = {}
for (const dep of Object.keys(value)) {
const { browserHash, ...c } = value[dep]
cleaned[dep] = c
}
return cleaned
}
if (key === 'optimized') {
return Object.keys(value).reduce(
(cleaned: Record<string, object>, dep: string) => {
const { browserHash, ...c } = value[dep]
cleaned[dep] = c
return cleaned
},
{}
)
}
if (key === 'chunks') {
return Object.keys(value).reduce(
(cleaned: Record<string, object>, dep: string) => {
const { browserHash, needsInterop, src, ...c } = value[dep]
cleaned[dep] = c
return cleaned
},
{}
)
}
return value
},
2
Expand Down Expand Up @@ -797,7 +869,8 @@ export function optimizeDepInfoFromFile(
): OptimizedDepInfo | undefined {
return (
findFileInfo(metadata.optimized, file) ||
findFileInfo(metadata.discovered, file)
findFileInfo(metadata.discovered, file) ||
findFileInfo(metadata.chunks, file)
)
}

Expand Down
23 changes: 21 additions & 2 deletions packages/vite/src/node/optimizer/registerMissing.ts
Expand Up @@ -2,7 +2,7 @@ import colors from 'picocolors'
import {
createOptimizeDepsRun,
getOptimizedDepPath,
getOptimizedBrowserHash,
getHash,
depsFromOptimizedDepInfo,
newDepOptimizationProcessing
} from '.'
Expand Down Expand Up @@ -202,6 +202,21 @@ export function createMissingImporterRegisterFn(
})
}

const discoveredTimestamp = Date.now()

// Browser hash for a newly discovered dep: mixes the discovery timestamp
// into the hash of known + missing deps so the browser does not reuse a
// stale cached module for it
function getDiscoveredBrowserHash(
  hash: string,
  deps: Record<string, string>,
  missing: Record<string, string>
) {
  const fingerprint =
    hash +
    JSON.stringify(deps) +
    JSON.stringify(missing) +
    discoveredTimestamp
  return getHash(fingerprint)
}

return function registerMissingImport(
id: string,
resolved: string,
Expand All @@ -211,6 +226,10 @@ export function createMissingImporterRegisterFn(
if (optimized) {
return optimized
}
const chunk = metadata.chunks[id]
if (chunk) {
return chunk
}
let missing = metadata.discovered[id]
if (missing) {
// We already discovered this dependency
Expand All @@ -225,7 +244,7 @@ export function createMissingImporterRegisterFn(
// the current state of known + missing deps. If its optimizeDeps run
// doesn't alter the bundled files of previously known dependencies,
// we don't need a full reload and this browserHash will be kept
browserHash: getOptimizedBrowserHash(
browserHash: getDiscoveredBrowserHash(
metadata.hash,
depsFromOptimizedDepInfo(metadata.optimized),
depsFromOptimizedDepInfo(metadata.discovered)
Expand Down
6 changes: 5 additions & 1 deletion packages/vite/src/node/plugins/importAnalysis.ts
Expand Up @@ -271,7 +271,11 @@ export function importAnalysisPlugin(config: ResolvedConfig): Plugin {
// (e.g. vue blocks), inherit importer's version query
// do not do this for unknown type imports, otherwise the appended
// query can break 3rd party plugin's extension checks.
if ((isRelative || isSelfImport) && !/[\?&]import=?\b/.test(url)) {
if (
(isRelative || isSelfImport) &&
!/[\?&]import=?\b/.test(url) &&
!url.match(DEP_VERSION_RE)
) {
const versionMatch = importer.match(DEP_VERSION_RE)
if (versionMatch) {
url = injectQuery(url, versionMatch[1])
Expand Down
30 changes: 28 additions & 2 deletions packages/vite/src/node/plugins/resolve.ts
Expand Up @@ -7,7 +7,8 @@ import {
SPECIAL_QUERY_RE,
DEFAULT_EXTENSIONS,
DEFAULT_MAIN_FIELDS,
OPTIMIZABLE_ENTRY_RE
OPTIMIZABLE_ENTRY_RE,
DEP_VERSION_RE
} from '../constants'
import {
isBuiltin,
Expand All @@ -29,7 +30,11 @@ import {
isPossibleTsOutput,
getPotentialTsSrcPaths
} from '../utils'
import { createIsOptimizedDepUrl } from '../optimizer'
import {
createIsOptimizedDepUrl,
isOptimizedDepFile,
optimizeDepInfoFromFile
} from '../optimizer'
import type { OptimizedDepInfo } from '../optimizer'
import type { ViteDevServer, SSROptions } from '..'
import type { PartialResolvedId } from 'rollup'
Expand Down Expand Up @@ -163,6 +168,22 @@ export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin {
// handle browser field mapping for relative imports

const normalizedFsPath = normalizePath(fsPath)

if (server && isOptimizedDepFile(normalizedFsPath, server!.config)) {
// Optimized files could not yet exist in disk, resolve to the full path
// Inject the current browserHash version if the path doesn't have one
if (!normalizedFsPath.match(DEP_VERSION_RE)) {
const browserHash = optimizeDepInfoFromFile(
server._optimizeDepsMetadata!,
normalizedFsPath
)?.browserHash
if (browserHash) {
return injectQuery(normalizedFsPath, `v=${browserHash}`)
}
}
return normalizedFsPath
}

const pathFromBasedir = normalizedFsPath.slice(basedir.length)
if (pathFromBasedir.startsWith('/node_modules/')) {
// normalize direct imports from node_modules to bare imports, so the
Expand Down Expand Up @@ -638,6 +659,11 @@ export function tryOptimizedResolve(
return getOptimizedUrl(isOptimized)
}

const isChunk = depData.chunks[id]
if (isChunk) {
return getOptimizedUrl(isChunk)
}

if (!importer) return

// further check if id is imported by nested dependency
Expand Down

0 comments on commit 392a0de

Please sign in to comment.