fix shortest path for non-md files, mobile fix

Jacky Zhao 2023-08-06 20:52:17 -07:00
parent 22d2d344f2
commit 66e8afb008
4 changed files with 62 additions and 49 deletions

View File

@@ -57,13 +57,14 @@ async function buildQuartz(argv: Argv, clientRefresh: () => void) {
   console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

   perf.addEvent("glob")
-  const fps = await glob("**/*.md", argv.directory, cfg.configuration.ignorePatterns)
+  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
+  const fps = allFiles.filter((fp) => fp.endsWith(".md"))
   console.log(
     `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
   )

   const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
-  ctx.allSlugs = fps.map((fp) => slugifyFilePath(fp as FilePath))
+  ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))

   const parsedFiles = await parseMarkdown(ctx, filePaths)
   const filteredContent = filterContent(ctx, parsedFiles)
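A minimal sketch of why this matters for the "shortest path" fix: once non-md files contribute entries to allSlugs, a bare reference such as "photo.png" can be matched to its full slug by suffix. This is an illustration only, not Quartz's actual resolver; the names below are hypothetical.

// illustration: suffix-matching a bare file name against the site's known slugs
const allSlugs = ["notes/ideas", "notes/images/photo.png", "about"]

function resolveShortestPath(ref: string, slugs: string[]): string | undefined {
  // return the first slug that is exactly the reference or ends with "/<reference>"
  return slugs.find((slug) => slug === ref || slug.endsWith("/" + ref))
}

console.log(resolveShortestPath("photo.png", allSlugs)) // -> "notes/images/photo.png"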
@@ -93,61 +94,72 @@ async function startServing(
   let timeoutId: ReturnType<typeof setTimeout> | null = null
   let toRebuild: Set<FilePath> = new Set()
   let toRemove: Set<FilePath> = new Set()
+  let trackedAssets: Set<FilePath> = new Set()

   async function rebuild(fp: string, action: "add" | "change" | "delete") {
+    // don't do anything for gitignored files
+    if (ignored(fp)) {
+      return
+    }
+
+    // dont bother rebuilding for non-content files, just track and refresh
     if (path.extname(fp) !== ".md") {
-      // dont bother rebuilding for non-content files, just refresh
+      fp = toPosixPath(fp)
+      const filePath = joinSegments(argv.directory, fp) as FilePath
+      if (action === "add" || action === "change") {
+        trackedAssets.add(filePath)
+      } else if (action === "delete") {
+        trackedAssets.delete(filePath)
+      }
       clientRefresh()
       return
     }

     fp = toPosixPath(fp)
-    if (!ignored(fp)) {
-      const filePath = joinSegments(argv.directory, fp) as FilePath
-      if (action === "add" || action === "change") {
-        toRebuild.add(filePath)
-      } else if (action === "delete") {
-        toRemove.add(filePath)
-      }
-
-      if (timeoutId) {
-        clearTimeout(timeoutId)
-      }
-
-      // debounce rebuilds every 250ms
-      timeoutId = setTimeout(async () => {
-        const perf = new PerfTimer()
-        console.log(chalk.yellow("Detected change, rebuilding..."))
-        try {
-          const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-          ctx.allSlugs = [...new Set([...contentMap.keys(), ...toRebuild])]
-            .filter((fp) => !toRemove.has(fp))
-            .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-          const parsedContent = await parseMarkdown(ctx, filesToRebuild)
-          for (const content of parsedContent) {
-            const [_tree, vfile] = content
-            contentMap.set(vfile.data.filePath!, content)
-          }
-
-          for (const fp of toRemove) {
-            contentMap.delete(fp)
-          }
-
-          await rimraf(argv.output)
-          const parsedFiles = [...contentMap.values()]
-          const filteredContent = filterContent(ctx, parsedFiles)
-          await emitContent(ctx, filteredContent)
-          console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
-        } catch {
-          console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
-        }
-
-        clientRefresh()
-        toRebuild.clear()
-        toRemove.clear()
-      }, 250)
-    }
+    const filePath = joinSegments(argv.directory, fp) as FilePath
+    if (action === "add" || action === "change") {
+      toRebuild.add(filePath)
+    } else if (action === "delete") {
+      toRemove.add(filePath)
+    }
+
+    if (timeoutId) {
+      clearTimeout(timeoutId)
+    }
+
+    // debounce rebuilds every 250ms
+    timeoutId = setTimeout(async () => {
+      const perf = new PerfTimer()
+      console.log(chalk.yellow("Detected change, rebuilding..."))
+      try {
+        const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+        ctx.allSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+          .filter((fp) => !toRemove.has(fp))
+          .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+        const parsedContent = await parseMarkdown(ctx, filesToRebuild)
+        for (const content of parsedContent) {
+          const [_tree, vfile] = content
+          contentMap.set(vfile.data.filePath!, content)
+        }
+
+        for (const fp of toRemove) {
+          contentMap.delete(fp)
+        }
+
+        await rimraf(argv.output)
+        const parsedFiles = [...contentMap.values()]
+        const filteredContent = filterContent(ctx, parsedFiles)
+        await emitContent(ctx, filteredContent)
+        console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+      } catch {
+        console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+      }
+
+      clientRefresh()
+      toRebuild.clear()
+      toRemove.clear()
+    }, 250)
   }

   const watcher = chokidar.watch(".", {
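The watcher logic above collapses a burst of file events into one rebuild that fires 250ms after the last event. A stripped-down sketch of that debounce pattern, with illustrative names only (not the actual build code):

// collect changed paths and rebuild once the events stop for 250ms
let timeoutId: ReturnType<typeof setTimeout> | null = null
const pending: Set<string> = new Set()

function onFileEvent(fp: string) {
  pending.add(fp)
  if (timeoutId) clearTimeout(timeoutId)
  timeoutId = setTimeout(() => {
    console.log(`rebuilding ${pending.size} file(s)`)
    pending.clear()
    timeoutId = null
  }, 250)
}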

View File

@@ -12,7 +12,7 @@ export const Assets: QuartzEmitterPlugin = () => {
     },
     async emit({ argv, cfg }, _content, _resources, _emit): Promise<FilePath[]> {
       // glob all non MD/MDX/HTML files in content folder and copy it over
-      const assetsPath = joinSegments(argv.output, "assets")
+      const assetsPath = argv.output
       const fps = await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
       const res: FilePath[] = []
       for (const fp of fps) {
@@ -24,7 +24,7 @@ export const Assets: QuartzEmitterPlugin = () => {
         const dir = path.dirname(dest) as FilePath
         await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
         await fs.promises.copyFile(src, dest)
-        res.push(joinSegments("assets", fp) as FilePath)
+        res.push(fp)
       }

       return res
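With assetsPath now pointing at the output root, assets keep their content-relative location instead of being nested under an assets/ folder. A small sketch of the path mapping; the folder names below ("public", "notes/diagram.png") are assumptions for illustration:

import path from "path"

const output = "public" // stands in for argv.output
const fp = "notes/diagram.png" // asset path relative to the content directory

const before = path.join(output, "assets", fp) // public/assets/notes/diagram.png
const after = path.join(output, fp) // public/notes/diagram.png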

View File

@@ -99,9 +99,10 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options> | undefined> =
           typeof node.properties.src === "string"
         ) {
           if (!isAbsoluteUrl(node.properties.src)) {
+            let dest = node.properties.src as RelativeURL
             const ext = path.extname(node.properties.src)
-            node.properties.src =
-              transformLink(joinSegments("assets", node.properties.src)) + ext
+            dest = node.properties.src = transformLink(dest)
+            node.properties.src = dest + ext
           }
         }
       })
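A sketch of the src rewrite above using a stand-in transformLink. The assumption (not taken from Quartz's implementation) is that transformLink returns a slug-style link without the file extension, which is why the original extension is appended back for assets:

import path from "path"

// stand-in only: pretend the link transform strips the extension while resolving
const transformLink = (link: string): string => link.replace(path.extname(link), "")

const src = "images/photo.png"
const ext = path.extname(src) // ".png"
let dest = transformLink(src) // "images/photo"
dest = dest + ext // "images/photo.png"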

View File

@@ -83,7 +83,7 @@ a {
   @media all and (max-width: $fullPageWidth) {
     margin: 0 auto;
     padding: 0 1rem;
-    max-width: 800px;
+    // max-width: 800px;
   }

   & article {