Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: build time deps optimization, and ensure single crawl end call #12851

Merged
merged 8 commits into from Apr 14, 2023
5 changes: 5 additions & 0 deletions packages/vite/src/node/optimizer/index.ts
Expand Up @@ -504,6 +504,11 @@ export function runOptimizeDeps(
metadata,
cancel: cleanUp,
commit: async () => {
if (cleaned) {
throw new Error(
'Can not commit a Deps Optimization run as it was cancelled',
)
}
// Ignore clean up requests after this point so the temp folder isn't deleted before
// we finish committing the new deps cache files to the deps folder
committed = true
Expand Down
119 changes: 90 additions & 29 deletions packages/vite/src/node/optimizer/optimizer.ts
Expand Up @@ -174,6 +174,7 @@ async function createDepsOptimizer(

async function close() {
closed = true
crawlEndFinder.cancel()
await Promise.allSettled([
discover?.cancel(),
depsOptimizer.scanProcessing,
Expand Down Expand Up @@ -582,14 +583,22 @@ async function createDepsOptimizer(
}, timeout)
}

// During dev, onCrawlEnd is called once when the server starts and all static
// imports after the first request have been crawled (dynamic imports may also
// be crawled if the browser requests them right away).
// During build, onCrawlEnd will be called once after each buildStart (so in
// watch mode it will be called after each rebuild has processed every module).
// All modules are transformed first in this case (both static and dynamic).
let crawlEndCalled = false
async function onCrawlEnd() {
const wasCalledBefore = crawlEndCalled
crawlEndCalled = true

debug?.(colors.green(`✨ static imports crawl ended`))
if (firstRunCalled) {
if (closed || firstRunCalled || wasCalledBefore) {
return
}

currentlyProcessing = false

const crawlDeps = Object.keys(metadata.discovered)

// Await for the scan+optimize step running in the background
Expand All @@ -599,6 +608,7 @@ async function createDepsOptimizer(
if (!isBuild && optimizationResult) {
const result = await optimizationResult.result
optimizationResult = undefined
currentlyProcessing = false

const scanDeps = Object.keys(result.metadata.optimized)

Expand Down Expand Up @@ -650,6 +660,8 @@ async function createDepsOptimizer(
runOptimizer(result)
}
} else {
currentlyProcessing = false

if (crawlDeps.length === 0) {
debug?.(
colors.green(
Expand All @@ -664,30 +676,65 @@ async function createDepsOptimizer(
}
}

const runOptimizerIfIdleAfterMs = 50
let crawlEndFinder = setupOnCrawlEnd(onCrawlEnd)

// Called during buildStart at build time, when build --watch is used.
// On dev, onCrawlEnd is called once. On build, we call it when all the
// user modules are processed for every rebuild.
function resetRegisteredIds() {
crawlEndCalled = false
// Cancel the previous finder so its pending module waits become no-ops,
// then create a fresh one so the next rebuild gets its own crawl-end signal.
crawlEndFinder.cancel()
crawlEndFinder = setupOnCrawlEnd(onCrawlEnd)
}

// Mark a module id as a worker entry source; the finder stops waiting on it
// (worker bundling would otherwise deadlock the crawl-end detection).
function registerWorkersSource(id: string) {
crawlEndFinder.registerWorkersSource(id)
}
// Delay the optimizer run until `done` (the module's processing promise)
// settles. Skipped once crawl end has fired, and for optimized dep files,
// which are produced by the optimizer itself and must not gate it.
function delayDepsOptimizerUntil(id: string, done: () => Promise<any>) {
if (!crawlEndCalled && !depsOptimizer.isOptimizedDepFile(id)) {
crawlEndFinder.delayDepsOptimizerUntil(id, done)
}
}
// Schedule a fallback first optimizer run; only relevant before the first
// run has happened (no-op afterwards).
function ensureFirstRun() {
if (!firstRunCalled) crawlEndFinder.ensureFirstRun()
}
}

const runOptimizerIfIdleAfterMs = 50

function setupOnCrawlEnd(onCrawlEnd: () => void): {
ensureFirstRun: () => void
registerWorkersSource: (id: string) => void
delayDepsOptimizerUntil: (id: string, done: () => Promise<any>) => void
cancel: () => void
} {
let registeredIds: { id: string; done: () => Promise<any> }[] = []
let seenIds = new Set<string>()
let workersSources = new Set<string>()
const waitingOn = new Set<string>()
const seenIds = new Set<string>()
const workersSources = new Set<string>()
const waitingOn = new Map<string, () => void>()
let firstRunEnsured = false
let crawlEndCalled = false

function resetRegisteredIds() {
registeredIds = []
seenIds = new Set<string>()
workersSources = new Set<string>()
waitingOn.clear()
firstRunEnsured = false
// Set when this finder is abandoned (server closed, or a rebuild replaced
// it via resetRegisteredIds); all later callbacks become no-ops.
let cancelled = false
function cancel() {
cancelled = true
}

// Invoke the user callback at most once per finder instance, and never
// after cancel() — guards against duplicate crawl-end notifications.
function callOnCrawlEnd() {
if (!cancelled && !crawlEndCalled) {
crawlEndCalled = true
onCrawlEnd()
}
}

// If all the inputs are dependencies, we aren't going to get any
// delayDepsOptimizerUntil(id) calls. We need to guard against this
// by forcing a rerun if no deps have been registered
function ensureFirstRun() {
if (!firstRunEnsured && !firstRunCalled && registeredIds.length === 0) {
if (!firstRunEnsured && seenIds.size === 0) {
setTimeout(() => {
if (!closed && registeredIds.length === 0) {
onCrawlEnd()
if (seenIds.size === 0) {
callOnCrawlEnd()
Comment on lines +749 to +750
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Changing `registeredIds.length === 0` to `seenIds.size === 0` here is also needed after #12609

}
}, runOptimizerIfIdleAfterMs)
}
Expand All @@ -699,37 +746,44 @@ async function createDepsOptimizer(
// Avoid waiting for this id, as it may be blocked by the rollup
// bundling process of the worker that also depends on the optimizer
registeredIds = registeredIds.filter((registered) => registered.id !== id)
if (waitingOn.has(id)) {
waitingOn.delete(id)
runOptimizerWhenIdle()
}

const resolve = waitingOn.get(id)
// Forced resolve to avoid waiting for the bundling of the worker to finish
resolve?.()
}

function delayDepsOptimizerUntil(id: string, done: () => Promise<any>): void {
if (!depsOptimizer.isOptimizedDepFile(id) && !seenIds.has(id)) {
if (!seenIds.has(id)) {
seenIds.add(id)
registeredIds.push({ id, done })
runOptimizerWhenIdle()
callOnCrawlEndWhenIdle()
}
}

async function runOptimizerWhenIdle() {
if (waitingOn.size > 0) return
async function callOnCrawlEndWhenIdle() {
if (cancelled || waitingOn.size > 0) return

const processingRegisteredIds = registeredIds
registeredIds = []

const donePromises = processingRegisteredIds.map(async (registeredId) => {
waitingOn.add(registeredId.id)
// During build, we need to cancel workers
let resolve: () => void
const waitUntilDone = new Promise<void>((_resolve) => {
resolve = _resolve
registeredId.done().finally(() => resolve())
})
waitingOn.set(registeredId.id, () => resolve())

try {
await registeredId.done()
await waitUntilDone
} finally {
waitingOn.delete(registeredId.id)
}
})

const afterLoad = () => {
if (closed) return
if (cancelled) return
if (
registeredIds.length > 0 &&
registeredIds.every((registeredId) =>
Expand All @@ -740,9 +794,9 @@ async function createDepsOptimizer(
}

if (registeredIds.length > 0) {
runOptimizerWhenIdle()
callOnCrawlEndWhenIdle()
} else {
onCrawlEnd()
callOnCrawlEnd()
}
}

Expand All @@ -756,6 +810,13 @@ async function createDepsOptimizer(
setTimeout(afterLoad, runOptimizerIfIdleAfterMs)
}
}

return {
ensureFirstRun,
registerWorkersSource,
delayDepsOptimizerUntil,
cancel,
}
}

async function createDevSsrDepsOptimizer(
Expand Down
2 changes: 1 addition & 1 deletion playground/worker/classic-shared-worker.js
@@ -1,5 +1,5 @@
let base = `/${self.location.pathname.split('/')[1]}`
if (base === `/worker-entries`) base = '' // relative base
if (base.endsWith('.js') || base === `/worker-entries`) base = '' // for dev

importScripts(`${base}/classic.js`)

Expand Down
2 changes: 1 addition & 1 deletion playground/worker/classic-worker.js
@@ -1,5 +1,5 @@
let base = `/${self.location.pathname.split('/')[1]}`
if (base === `/worker-entries`) base = '' // relative base
if (base.endsWith('.js') || base === `/worker-entries`) base = '' // for dev
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Started getting a lot of warnings in dev

Error: Failed to load url /classic-shared-worker.js/classic.js (resolved id: /classic-shared-worker.js/classic.js). Does the file exist?
  loadAndTransform packages/vite/src/node/server/transformRequest.ts:242:22
  processTicksAndRejections node:internal/process/task_queues:95:5

I don't know if this is the correct fix; it seems this PR is just exposing an existing issue?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Seems to happen on main too. This fix works for me.


importScripts(`${base}/classic.js`)

Expand Down