From 6ab90d093c9414a8ad591ce2df997d8e399729d1 Mon Sep 17 00:00:00 2001
From: Jacky Zhao
Date: Thu, 17 Aug 2023 01:58:11 -0700
Subject: [PATCH] fix rebuild debouncing

---
 content/features/upcoming features.md |  3 +-
 quartz/bootstrap-cli.mjs              |  8 +++-
 quartz/build.ts                       | 68 +++++++++++++--------------
 3 files changed, 42 insertions(+), 37 deletions(-)

diff --git a/content/features/upcoming features.md b/content/features/upcoming features.md
index 7093a5bf..28ee48a4 100644
--- a/content/features/upcoming features.md
+++ b/content/features/upcoming features.md
@@ -4,9 +4,8 @@ draft: true

 ## todo

-- debounce cfg rebuild on large repos
-  - investigate content rebuild triggering multiple times even when debounced, causing an esbuild deadlock
 - dereference symlink for npx quartz sync
+  - prompt user as to whether to do it (it's expensive for large vaults)

 ## high priority backlog

diff --git a/quartz/bootstrap-cli.mjs b/quartz/bootstrap-cli.mjs
index c1c13085..8efa7b04 100755
--- a/quartz/bootstrap-cli.mjs
+++ b/quartz/bootstrap-cli.mjs
@@ -355,6 +355,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
       ],
     })

+    const timeoutIds = new Set()
     const build = async (clientRefresh) => {
       const result = await ctx.rebuild().catch((err) => {
         console.error(`${chalk.red("Couldn't parse Quartz configuration:")} ${fp}`)
@@ -380,6 +381,11 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
       clientRefresh()
     }

+    const rebuild = (clientRefresh) => {
+      timeoutIds.forEach((id) => clearTimeout(id))
+      timeoutIds.add(setTimeout(() => build(clientRefresh), 250))
+    }
+
     if (argv.serve) {
       const wss = new WebSocketServer({ port: 3001 })
       const connections = []
@@ -457,7 +463,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
}) .on("all", async () => { console.log(chalk.yellow("Detected a source code change, doing a hard rebuild...")) - await build(clientRefresh) + rebuild(clientRefresh) }) } else { await build(() => {}) diff --git a/quartz/build.ts b/quartz/build.ts index 779ab35e..b5b1f9ee 100644 --- a/quartz/build.ts +++ b/quartz/build.ts @@ -77,7 +77,7 @@ async function startServing( } const initialSlugs = ctx.allSlugs - let timeoutId: ReturnType | null = null + let timeoutIds: Set> = new Set() let toRebuild: Set = new Set() let toRemove: Set = new Set() let trackedAssets: Set = new Set() @@ -106,45 +106,45 @@ async function startServing( toRemove.add(filePath) } - if (timeoutId) { - clearTimeout(timeoutId) - } + timeoutIds.forEach((id) => clearTimeout(id)) // debounce rebuilds every 250ms - timeoutId = setTimeout(async () => { - const perf = new PerfTimer() - console.log(chalk.yellow("Detected change, rebuilding...")) - try { - const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) + timeoutIds.add( + setTimeout(async () => { + const perf = new PerfTimer() + console.log(chalk.yellow("Detected change, rebuilding...")) + try { + const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) - const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] - .filter((fp) => !toRemove.has(fp)) - .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) + const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] + .filter((fp) => !toRemove.has(fp)) + .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) - ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] - const parsedContent = await parseMarkdown(ctx, filesToRebuild) - for (const content of parsedContent) { - const [_tree, vfile] = content - contentMap.set(vfile.data.filePath!, content) + ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] + const parsedContent = await parseMarkdown(ctx, filesToRebuild) + for (const content of parsedContent) { + const [_tree, vfile] = content + contentMap.set(vfile.data.filePath!, content) + } + + for (const fp of toRemove) { + contentMap.delete(fp) + } + + await rimraf(argv.output) + const parsedFiles = [...contentMap.values()] + const filteredContent = filterContent(ctx, parsedFiles) + await emitContent(ctx, filteredContent) + console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) + } catch { + console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) } - for (const fp of toRemove) { - contentMap.delete(fp) - } - - await rimraf(argv.output) - const parsedFiles = [...contentMap.values()] - const filteredContent = filterContent(ctx, parsedFiles) - await emitContent(ctx, filteredContent) - console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) - } catch { - console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) - } - - clientRefresh() - toRebuild.clear() - toRemove.clear() - }, 250) + clientRefresh() + toRebuild.clear() + toRemove.clear() + }, 250), + ) } const watcher = chokidar.watch(".", {