fix rebuild debouncing

parent a3cba52362
commit 6ab90d093c
@@ -4,9 +4,8 @@ draft: true
 
 ## todo
 
-- debounce cfg rebuild on large repos
-- investigate content rebuild triggering multiple times even when debounced, causing an esbuild deadlock
 - dereference symlink for npx quartz sync
+  - prompt user as to whether to do it (it's expensive for large vaults)
 
 ## high priority backlog
 
@@ -355,6 +355,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
   ],
 })
 
+const timeoutIds = new Set()
 const build = async (clientRefresh) => {
   const result = await ctx.rebuild().catch((err) => {
     console.error(`${chalk.red("Couldn't parse Quartz configuration:")} ${fp}`)
@@ -380,6 +381,11 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
   clientRefresh()
 }
 
+const rebuild = (clientRefresh) => {
+  timeoutIds.forEach((id) => clearTimeout(id))
+  timeoutIds.add(setTimeout(() => build(clientRefresh), 250))
+}
+
 if (argv.serve) {
   const wss = new WebSocketServer({ port: 3001 })
   const connections = []
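The `rebuild` wrapper added above is a trailing-edge debounce: each call cancels whatever timers are still pending in `timeoutIds` and schedules a fresh 250 ms one, so a burst of source-file changes collapses into a single `build` call once the burst goes quiet. Below is a minimal standalone sketch of the same pattern, not the Quartz code itself; `debounce`, `onChange`, and `delayMs` are illustrative names, while the 250 ms delay and the clearTimeout/setTimeout bookkeeping mirror the diff.

```ts
// Trailing-edge debounce: only the last call in a burst actually fires.
const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()

function debounce<Args extends unknown[]>(fn: (...args: Args) => void, delayMs = 250) {
  return (...args: Args) => {
    // Cancel every timer still pending from earlier calls in the burst.
    timeoutIds.forEach((id) => clearTimeout(id))
    timeoutIds.clear()
    // Schedule a fresh timer; it fires only if no further call arrives within delayMs.
    timeoutIds.add(setTimeout(() => fn(...args), delayMs))
  }
}

// Example: two rapid file events produce a single rebuild ~250 ms after the last one.
const onChange = debounce((path: string) => console.log(`rebuilding after ${path}`))
onChange("a.md")
onChange("b.md")
```

The sketch also empties the set after cancelling; the diff does not, which is harmless since clearTimeout on an already-fired timer is a no-op, but it keeps the set from accumulating stale IDs.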
@@ -457,7 +463,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
     })
     .on("all", async () => {
       console.log(chalk.yellow("Detected a source code change, doing a hard rebuild..."))
-      await build(clientRefresh)
+      rebuild(clientRefresh)
     })
 } else {
   await build(() => {})
@@ -77,7 +77,7 @@ async function startServing(
   }
 
   const initialSlugs = ctx.allSlugs
-  let timeoutId: ReturnType<typeof setTimeout> | null = null
+  let timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
   let toRebuild: Set<FilePath> = new Set()
   let toRemove: Set<FilePath> = new Set()
   let trackedAssets: Set<FilePath> = new Set()
@@ -106,12 +106,11 @@ async function startServing(
       toRemove.add(filePath)
     }
 
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
+    timeoutIds.forEach((id) => clearTimeout(id))
 
     // debounce rebuilds every 250ms
-    timeoutId = setTimeout(async () => {
+    timeoutIds.add(
+      setTimeout(async () => {
       const perf = new PerfTimer()
       console.log(chalk.yellow("Detected change, rebuilding..."))
       try {
@@ -144,7 +143,8 @@ async function startServing(
       clientRefresh()
       toRebuild.clear()
       toRemove.clear()
-    }, 250)
+      }, 250),
+    )
   }
 
   const watcher = chokidar.watch(".", {
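The content watcher in `startServing` gets the same treatment: instead of a single `timeoutId` that is cleared and overwritten, every scheduled rebuild timer is tracked in `timeoutIds`, and all of them are cancelled before a new one is added. That guarantees at most one live timer per burst of change events, which is presumably how this addresses the "content rebuild triggering multiple times even when debounced" item crossed off in the todo diff above. A rough sketch of the resulting handler shape follows; `onFileChanged` and the logging are illustrative stand-ins for code not shown in the diff, while `toRebuild`, `timeoutIds`, and the 250 ms delay come from it.

```ts
const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
const toRebuild: Set<string> = new Set()

function onFileChanged(filePath: string) {
  toRebuild.add(filePath)

  // Cancel every rebuild that is still pending, then schedule exactly one.
  timeoutIds.forEach((id) => clearTimeout(id))
  timeoutIds.add(
    setTimeout(async () => {
      console.log(`rebuilding ${toRebuild.size} changed file(s)...`)
      // ...parse and emit the queued files here, then refresh connected clients...
      toRebuild.clear()
    }, 250),
  )
}

// A burst of events ends up as one rebuild covering both files.
onFileChanged("notes/a.md")
onFileChanged("notes/b.md")
```

Tracking the full set of pending timers rather than a single mutable variable keeps the debounce correct even if several change events are handled before any timer has a chance to fire.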