Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: delayed full page reload #7347

Merged
merged 1 commit into from Mar 16, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
98 changes: 65 additions & 33 deletions packages/vite/src/node/optimizer/index.ts
Expand Up @@ -101,11 +101,18 @@ export interface DepOptimizationResult {
* for large applications
*/
alteredFiles: boolean
/**
 * When doing a re-run, if there are newly discovered dependencies
 * the page reload will be delayed until the next rerun, so the
 * result will be discarded
*/
commit: () => void
cancel: () => void
}

/**
 * Handle for an in-flight optimize-deps run. `promise` settles once the run
 * is committed or cancelled; callers that need the cached deps await it.
 * The result payload was removed from the promise — results now flow through
 * `DepOptimizationResult` returned by the run itself.
 */
export interface DepOptimizationProcessing {
  promise: Promise<void>
  resolve: () => void
}

export interface OptimizedDepInfo {
Expand All @@ -118,7 +125,7 @@ export interface OptimizedDepInfo {
* During optimization, ids can still be resolved to their final location
* but the bundles may not yet be saved to disk
*/
processing: Promise<DepOptimizationResult | undefined>
processing: Promise<void>
}

export interface DepOptimizationMetadata {
Expand All @@ -141,11 +148,6 @@ export interface DepOptimizationMetadata {
* Metadata for each newly discovered dependency after processing
*/
discovered: Record<string, OptimizedDepInfo>
/**
* During optimization, ids can still be resolved to their final location
* but the bundles may not yet be saved to disk
*/
processing: Promise<DepOptimizationResult | undefined>
}

/**
Expand All @@ -166,7 +168,8 @@ export async function optimizeDeps(
newDeps,
ssr
)
await run()
const result = await run()
result.commit()
return metadata
}

Expand All @@ -183,7 +186,7 @@ export async function createOptimizeDepsRun(
ssr?: boolean
): Promise<{
metadata: DepOptimizationMetadata
run: () => Promise<DepOptimizationResult | undefined>
run: () => Promise<DepOptimizationResult>
}> {
config = {
...config,
Expand All @@ -210,8 +213,7 @@ export async function createOptimizeDepsRun(
hash: mainHash,
browserHash: mainHash,
optimized: {},
discovered: {},
processing: processing.promise
discovered: {}
}

if (!force) {
Expand All @@ -227,9 +229,20 @@ export async function createOptimizeDepsRun(
// hash is consistent, no need to re-bundle
if (prevData && prevData.hash === metadata.hash) {
log('Hash is consistent. Skipping. Use --force to override.')
// Nothing to commit or cancel as we are using the cache, we only
// need to resolve the processing promise so requests can move on
const resolve = () => {
processing.resolve()
}
return {
metadata: prevData,
run: () => (processing.resolve(), processing.promise)
run: async () => {
return {
alteredFiles: false,
commit: resolve,
cancel: resolve
}
}
}
}
}
Expand Down Expand Up @@ -315,19 +328,24 @@ export async function createOptimizeDepsRun(

return { metadata, run: prebundleDeps }

async function prebundleDeps(): Promise<DepOptimizationResult | undefined> {
async function prebundleDeps(): Promise<DepOptimizationResult> {
// We prebundle dependencies with esbuild and cache them, but there is no need
// to wait here. Code that needs to access the cached deps needs to await
// the optimizeDepInfo.processing promise for each dep

const qualifiedIds = Object.keys(deps)

if (!qualifiedIds.length) {
// Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
commitProcessingDepsCacheSync()
log(`No dependencies to bundle. Skipping.\n\n\n`)
processing.resolve()
return
return {
alteredFiles: false,
commit() {
// Write metadata file, delete `deps` folder and rename the `processing` folder to `deps`
commitProcessingDepsCacheSync()
log(`No dependencies to bundle. Skipping.\n\n\n`)
processing.resolve()
},
cancel
}
}

let depsString: string
Expand Down Expand Up @@ -510,25 +528,39 @@ export async function createOptimizeDepsRun(
metadata.browserHash = newBrowserHash
}

// Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
commitProcessingDepsCacheSync()

debug(`deps bundled in ${(performance.now() - start).toFixed(2)}ms`)
processing.resolve({ alteredFiles })
return processing.promise

return {
alteredFiles,
commit() {
// Write metadata file, delete `deps` folder and rename the new `processing` folder to `deps` in sync
commitProcessingDepsCacheSync()
processing.resolve()
},
cancel
}
}

// Atomically promote the temporal processing dir to the final deps cache dir:
// write the metadata file, delete the stale `deps` folder, and rename the
// `processing` folder to `deps`. All steps are sync so a request can never
// observe a half-committed cache.
// NOTE(review): the span as scraped contained both the pre-diff inline
// existsSync/rmSync block AND the post-diff removeDirSync call, deleting
// depsCacheDir twice — only the helper call is kept here.
function commitProcessingDepsCacheSync() {
  // Rewire the file paths from the temporal processing dir to the final deps cache dir
  const dataPath = path.join(processingCacheDir, '_metadata.json')
  writeFile(dataPath, stringifyOptimizedDepsMetadata(metadata, depsCacheDir))
  // Processing is done, we can now replace the depsCacheDir with processingCacheDir
  removeDirSync(depsCacheDir)
  fs.renameSync(processingCacheDir, depsCacheDir)
}

// Discard an in-flight optimization run: remove the temporal processing
// directory and resolve the shared processing promise so pending requests
// waiting on it can move on (they will see the previous cache state).
// NOTE(review): `processingCacheDir` and `processing` come from the enclosing
// createOptimizeDepsRun scope — confirm against the full file.
function cancel() {
removeDirSync(processingCacheDir)
processing.resolve()
}
}

/**
 * Recursively delete `dir` if it exists; no-op when it does not.
 * Prefers fs.rmSync, falling back to fs.rmdirSync on older runtimes.
 */
function removeDirSync(dir: string) {
  if (!fs.existsSync(dir)) {
    return
  }
  // TODO: Remove the rmdirSync fallback after support for Node 12 is dropped
  const remove = fs.rmSync ?? fs.rmdirSync
  remove(dir, { recursive: true })
}

export async function findKnownImports(
Expand Down Expand Up @@ -565,10 +597,10 @@ async function addManuallyIncludedOptimizeDeps(
}

export function newDepOptimizationProcessing(): DepOptimizationProcessing {
let resolve: (result?: DepOptimizationResult) => void
let resolve: () => void
const promise = new Promise((_resolve) => {
resolve = _resolve
}) as Promise<DepOptimizationResult | undefined>
}) as Promise<void>
return { promise, resolve: resolve! }
}

Expand Down Expand Up @@ -638,7 +670,7 @@ export function createIsOptimizedDepUrl(config: ResolvedConfig) {
function parseOptimizedDepsMetadata(
jsonMetadata: string,
depsCacheDir: string,
processing: Promise<DepOptimizationResult | undefined>
processing: Promise<void>
) {
const metadata = JSON.parse(jsonMetadata, (key: string, value: string) => {
// Paths can be absolute or relative to the deps cache dir where
Expand All @@ -651,7 +683,7 @@ function parseOptimizedDepsMetadata(
for (const o of Object.keys(metadata.optimized)) {
metadata.optimized[o].processing = processing
}
return { ...metadata, discovered: {}, processing }
return { ...metadata, discovered: {} }
}

function stringifyOptimizedDepsMetadata(
Expand Down