perf: incremental rebuild (--fastRebuild v2 but default) (#1841)
- checkpoint
- incremental all the things
- properly splice changes array
- smol doc update
- update docs
- make fancy logger dumb in ci
@@ -221,12 +221,26 @@ export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (

export type QuartzEmitterPluginInstance = {
  name: string
  emit(ctx: BuildCtx, content: ProcessedContent[], resources: StaticResources): Promise<FilePath[]>
  emit(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
  ): Promise<FilePath[]> | AsyncGenerator<FilePath>
  partialEmit?(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
    changeEvents: ChangeEvent[],
  ): Promise<FilePath[]> | AsyncGenerator<FilePath> | null
  getQuartzComponents(ctx: BuildCtx): QuartzComponent[]
}
```

An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created.
An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. It can optionally implement a `partialEmit` function for incremental builds.

- `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created.
- `partialEmit` is an optional function that enables incremental builds. It receives information about which files have changed (`changeEvents`) and can selectively rebuild only the necessary files. This is useful for optimizing build times in development mode. If `partialEmit` is undefined, it will default to the `emit` function.
- `getQuartzComponents` declares which Quartz components the emitter uses to construct its pages.
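To make the new hook concrete, here is a minimal, hypothetical sketch of an emitter that implements `partialEmit`. It is not part of this changeset: the plugin name is made up, and it assumes the `write` helper from `quartz/plugins/emitters/helpers.ts` accepts `{ ctx, slug, ext, content }` and that a `ChangeEvent` carries the parsed markdown vfile (when there is one) as `file`.

```typescript
import { QuartzEmitterPlugin } from "../types"
import { write } from "./helpers"

// Hypothetical example: mirrors every markdown page to a small .txt file.
export const PlainTextMirror: QuartzEmitterPlugin = () => ({
  name: "PlainTextMirror",
  // full build: emit one file per parsed markdown page
  async *emit(ctx, content, _resources) {
    for (const [_tree, file] of content) {
      yield await write({
        ctx,
        slug: file.data.slug!,
        ext: ".txt",
        content: `mirror of ${file.data.slug}`,
      })
    }
  },
  // incremental build: only re-emit pages that were added or changed
  async *partialEmit(ctx, _content, _resources, changeEvents) {
    for (const changeEvent of changeEvents) {
      if (!changeEvent.file) continue // non-markdown change, nothing to re-emit
      if (changeEvent.type === "add" || changeEvent.type === "change") {
        yield await write({
          ctx,
          slug: changeEvent.file.data.slug!,
          ext: ".txt",
          content: `mirror of ${changeEvent.file.data.slug}`,
        })
      }
    }
  },
  getQuartzComponents: () => [],
})
```

Returning `null` from `partialEmit` signals that nothing needs to be re-emitted for this plugin on that incremental build, whereas leaving `partialEmit` undefined makes the build fall back to the full `emit`.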

Creating new files can be done via regular Node [fs module](https://nodejs.org/api/fs.html) (i.e. `fs.cp` or `fs.writeFile`) or via the `write` function in `quartz/plugins/emitters/helpers.ts` if you are creating files that contain text. `write` has the following signature:

@@ -32,7 +32,7 @@ If you prefer instructions in a video format you can try following Nicole van de
## 🔧 Features

- [[Obsidian compatibility]], [[full-text search]], [[graph view]], note transclusion, [[wikilinks]], [[backlinks]], [[features/Latex|Latex]], [[syntax highlighting]], [[popover previews]], [[Docker Support]], [[i18n|internationalization]], [[comments]] and [many more](./features/) right out of the box
- Hot-reload for both configuration and content
- Hot-reload on configuration edits and incremental rebuilds for content edits
- Simple JSX layouts and [[creating components|page components]]
- [[SPA Routing|Ridiculously fast page loads]] and tiny bundle sizes
- Fully-customizable parsing, filtering, and page generation through [[making plugins|plugins]]

package-lock.json (generated, 22 changed lines)
@@ -1,12 +1,12 @@
{
  "name": "@jackyzha0/quartz",
  "version": "4.4.1",
  "version": "4.5.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@jackyzha0/quartz",
      "version": "4.4.1",
      "version": "4.5.0",
      "license": "MIT",
      "dependencies": {
        "@clack/prompts": "^0.10.0",
@@ -14,6 +14,7 @@
        "@myriaddreamin/rehype-typst": "^0.5.4",
        "@napi-rs/simple-git": "0.1.19",
        "@tweenjs/tween.js": "^25.0.0",
        "ansi-truncate": "^1.2.0",
        "async-mutex": "^0.5.0",
        "chalk": "^5.4.1",
        "chokidar": "^4.0.3",
@@ -34,6 +35,7 @@
        "mdast-util-to-hast": "^13.2.0",
        "mdast-util-to-string": "^4.0.0",
        "micromorph": "^0.4.5",
        "minimatch": "^10.0.1",
        "pixi.js": "^8.8.1",
        "preact": "^10.26.4",
        "preact-render-to-string": "^6.5.13",
@@ -2032,6 +2034,15 @@
        "url": "https://github.com/chalk/ansi-styles?sponsor=1"
      }
    },
    "node_modules/ansi-truncate": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/ansi-truncate/-/ansi-truncate-1.2.0.tgz",
      "integrity": "sha512-/SLVrxNIP8o8iRHjdK3K9s2hDqdvb86NEjZOAB6ecWFsOo+9obaby97prnvAPn6j7ExXCpbvtlJFYPkkspg4BQ==",
      "license": "MIT",
      "dependencies": {
        "fast-string-truncated-width": "^1.2.0"
      }
    },
    "node_modules/argparse": {
      "version": "2.0.1",
      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -3058,6 +3069,12 @@
        "node": ">=8.6.0"
      }
    },
    "node_modules/fast-string-truncated-width": {
      "version": "1.2.1",
      "resolved": "https://registry.npmjs.org/fast-string-truncated-width/-/fast-string-truncated-width-1.2.1.tgz",
      "integrity": "sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==",
      "license": "MIT"
    },
    "node_modules/fastq": {
      "version": "1.19.0",
      "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz",
@@ -5238,6 +5255,7 @@
      "version": "10.0.1",
      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz",
      "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
      "license": "ISC",
      "dependencies": {
        "brace-expansion": "^2.0.1"
      },

@@ -2,7 +2,7 @@
  "name": "@jackyzha0/quartz",
  "description": "🌱 publish your digital garden and notes as a website",
  "private": true,
  "version": "4.4.1",
  "version": "4.5.0",
  "type": "module",
  "author": "jackyzha0 <j.zhao2k19@gmail.com>",
  "license": "MIT",
@@ -40,6 +40,7 @@
    "@myriaddreamin/rehype-typst": "^0.5.4",
    "@napi-rs/simple-git": "0.1.19",
    "@tweenjs/tween.js": "^25.0.0",
    "ansi-truncate": "^1.2.0",
    "async-mutex": "^0.5.0",
    "chalk": "^5.4.1",
    "chokidar": "^4.0.3",
@@ -60,6 +61,7 @@
    "mdast-util-to-hast": "^13.2.0",
    "mdast-util-to-string": "^4.0.0",
    "micromorph": "^0.4.5",
    "minimatch": "^10.0.1",
    "pixi.js": "^8.8.1",
    "preact": "^10.26.4",
    "preact-render-to-string": "^6.5.13",

@@ -57,7 +57,7 @@ const config: QuartzConfig = {
    transformers: [
      Plugin.FrontMatter(),
      Plugin.CreatedModifiedDate({
        priority: ["frontmatter", "filesystem"],
        priority: ["git", "frontmatter", "filesystem"],
      }),
      Plugin.SyntaxHighlighting({
        theme: {

quartz/build.ts (442 changed lines)
@@ -9,7 +9,7 @@ import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path"
import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import { Argv, BuildCtx } from "./util/ctx"
@@ -17,34 +17,39 @@ import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
import { Mutex } from "async-mutex"
import DepGraph from "./depgraph"
import { getStaticResourcesFromPlugins } from "./plugins"
import { randomIdNonSecure } from "./util/random"
import { ChangeEvent } from "./plugins/types"
import { minimatch } from "minimatch"

type Dependencies = Record<string, DepGraph<FilePath> | null>
type ContentMap = Map<
  FilePath,
  | {
      type: "markdown"
      content: ProcessedContent
    }
  | {
      type: "other"
    }
>

type BuildData = {
  ctx: BuildCtx
  ignored: GlobbyFilterFunction
  mut: Mutex
  initialSlugs: FullSlug[]
  // TODO merge contentMap and trackedAssets
  contentMap: Map<FilePath, ProcessedContent>
  trackedAssets: Set<FilePath>
  toRebuild: Set<FilePath>
  toRemove: Set<FilePath>
  contentMap: ContentMap
  changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
  lastBuildMs: number
  dependencies: Dependencies
}

type FileEvent = "add" | "change" | "delete"

async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
  const ctx: BuildCtx = {
    buildId: randomIdNonSecure(),
    argv,
    cfg,
    allSlugs: [],
    allFiles: [],
    incremental: false,
  }

  const perf = new PerfTimer()
@@ -67,64 +72,70 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {

  perf.addEvent("glob")
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
  const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort()
  const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
  console.log(
    `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
    `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
  )

  const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
  const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
  ctx.allFiles = allFiles
  ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))

  const parsedFiles = await parseMarkdown(ctx, filePaths)
  const filteredContent = filterContent(ctx, parsedFiles)

  const dependencies: Record<string, DepGraph<FilePath> | null> = {}

  // Only build dependency graphs if we're doing a fast rebuild
  if (argv.fastRebuild) {
    const staticResources = getStaticResourcesFromPlugins(ctx)
    for (const emitter of cfg.plugins.emitters) {
      dependencies[emitter.name] =
        (await emitter.getDependencyGraph?.(ctx, filteredContent, staticResources)) ?? null
    }
  }

  await emitContent(ctx, filteredContent)
  console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
  console.log(chalk.green(`Done processing ${markdownPaths.length} files in ${perf.timeSince()}`))
  release()

  if (argv.serve) {
    return startServing(ctx, mut, parsedFiles, clientRefresh, dependencies)
  if (argv.watch) {
    ctx.incremental = true
    return startWatching(ctx, mut, parsedFiles, clientRefresh)
  }
}

// setup watcher for rebuilds
async function startServing(
async function startWatching(
  ctx: BuildCtx,
  mut: Mutex,
  initialContent: ProcessedContent[],
  clientRefresh: () => void,
  dependencies: Dependencies, // emitter name: dep graph
) {
  const { argv } = ctx
  const { argv, allFiles } = ctx

  // cache file parse results
  const contentMap = new Map<FilePath, ProcessedContent>()
  for (const content of initialContent) {
    const [_tree, vfile] = content
    contentMap.set(vfile.data.filePath!, content)
  const contentMap: ContentMap = new Map()
  for (const filePath of allFiles) {
    contentMap.set(filePath, {
      type: "other",
    })
  }

  for (const content of initialContent) {
    const [_tree, vfile] = content
    contentMap.set(vfile.data.relativePath!, {
      type: "markdown",
      content,
    })
  }

  const gitIgnoredMatcher = await isGitIgnored()
  const buildData: BuildData = {
    ctx,
    mut,
    dependencies,
    contentMap,
    ignored: await isGitIgnored(),
    initialSlugs: ctx.allSlugs,
    toRebuild: new Set<FilePath>(),
    toRemove: new Set<FilePath>(),
    trackedAssets: new Set<FilePath>(),
    ignored: (path) => {
      if (gitIgnoredMatcher(path)) return true
      const pathStr = path.toString()
      for (const pattern of cfg.configuration.ignorePatterns) {
        if (minimatch(pathStr, pattern)) {
          return true
        }
      }

      return false
    },

    changesSinceLastBuild: {},
    lastBuildMs: 0,
  }

@@ -134,34 +145,37 @@ async function startServing(
    ignoreInitial: true,
  })

  const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint
  const changes: ChangeEvent[] = []
  watcher
    .on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData))
    .on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData))
    .on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData))
    .on("add", (fp) => {
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "add" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("change", (fp) => {
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "change" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("unlink", (fp) => {
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "delete" })
      void rebuild(changes, clientRefresh, buildData)
    })

  return async () => {
    await watcher.close()
  }
}

async function partialRebuildFromEntrypoint(
  filepath: string,
  action: FileEvent,
  clientRefresh: () => void,
  buildData: BuildData, // note: this function mutates buildData
) {
  const { ctx, ignored, dependencies, contentMap, mut, toRemove } = buildData
async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
  const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
  const { argv, cfg } = ctx

  // don't do anything for gitignored files
  if (ignored(filepath)) {
    return
  }

  const buildId = randomIdNonSecure()
  ctx.buildId = buildId
  buildData.lastBuildMs = new Date().getTime()
  const numChangesInBuild = changes.length
  const release = await mut.acquire()

  // if there's another build after us, release and let them do it
@@ -171,261 +185,105 @@ async function partialRebuildFromEntrypoint(
  }

  const perf = new PerfTimer()
  perf.addEvent("rebuild")
  console.log(chalk.yellow("Detected change, rebuilding..."))

  // UPDATE DEP GRAPH
  const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath
  // update changesSinceLastBuild
  for (const change of changes) {
    changesSinceLastBuild[change.path] = change.type
  }

  const staticResources = getStaticResourcesFromPlugins(ctx)
  let processedFiles: ProcessedContent[] = []

  switch (action) {
    case "add":
      // add to cache when new file is added
      processedFiles = await parseMarkdown(ctx, [fp])
      processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))

      // update the dep graph by asking all emitters whether they depend on this file
      for (const emitter of cfg.plugins.emitters) {
        const emitterGraph =
          (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null

        if (emitterGraph) {
          const existingGraph = dependencies[emitter.name]
          if (existingGraph !== null) {
            existingGraph.mergeGraph(emitterGraph)
          } else {
            // might be the first time we're adding a mardown file
            dependencies[emitter.name] = emitterGraph
          }
        }
      }
      break
    case "change":
      // invalidate cache when file is changed
      processedFiles = await parseMarkdown(ctx, [fp])
      processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))

      // only content files can have added/removed dependencies because of transclusions
      if (path.extname(fp) === ".md") {
        for (const emitter of cfg.plugins.emitters) {
          // get new dependencies from all emitters for this file
          const emitterGraph =
            (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null

          // only update the graph if the emitter plugin uses the changed file
          // eg. Assets plugin ignores md files, so we skip updating the graph
          if (emitterGraph?.hasNode(fp)) {
            // merge the new dependencies into the dep graph
            dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
          }
        }
      }
      break
    case "delete":
      toRemove.add(fp)
      break
  const pathsToParse: FilePath[] = []
  for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
    if (type === "delete" || path.extname(fp) !== ".md") continue
    const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
    pathsToParse.push(fullPath)
  }

  if (argv.verbose) {
    console.log(`Updated dependency graphs in ${perf.timeSince()}`)
  const parsed = await parseMarkdown(ctx, pathsToParse)
  for (const content of parsed) {
    contentMap.set(content[1].data.relativePath!, {
      type: "markdown",
      content,
    })
  }

  // EMIT
  perf.addEvent("rebuild")
  // update state using changesSinceLastBuild
  // we do this weird play of add => compute change events => remove
  // so that partialEmitters can do appropriate cleanup based on the content of deleted files
  for (const [file, change] of Object.entries(changesSinceLastBuild)) {
    if (change === "delete") {
      // universal delete case
      contentMap.delete(file as FilePath)
    }

    // manually track non-markdown files as processed files only
    // contains markdown files
    if (change === "add" && path.extname(file) !== ".md") {
      contentMap.set(file as FilePath, {
        type: "other",
      })
    }
  }

  const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
    const path = fp as FilePath
    const processedContent = contentMap.get(path)
    if (processedContent?.type === "markdown") {
      const [_tree, file] = processedContent.content
      return {
        type,
        path,
        file,
      }
    }

    return {
      type,
      path,
    }
  })

  // update allFiles and then allSlugs with the consistent view of content map
  ctx.allFiles = Array.from(contentMap.keys())
  ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
  const processedFiles = Array.from(contentMap.values())
    .filter((file) => file.type === "markdown")
    .map((file) => file.content)

  let emittedFiles = 0

  for (const emitter of cfg.plugins.emitters) {
    const depGraph = dependencies[emitter.name]

    // emitter hasn't defined a dependency graph. call it with all processed files
    if (depGraph === null) {
      if (argv.verbose) {
        console.log(
          `Emitter ${emitter.name} doesn't define a dependency graph. Calling it with all files...`,
        )
      }

      const files = [...contentMap.values()].filter(
        ([_node, vfile]) => !toRemove.has(vfile.data.filePath!),
      )

      const emitted = await emitter.emit(ctx, files, staticResources)
      if (Symbol.asyncIterator in emitted) {
        // Async generator case
        for await (const file of emitted) {
          emittedFiles++
          if (ctx.argv.verbose) {
            console.log(`[emit:${emitter.name}] ${file}`)
          }
        }
      } else {
        // Array case
        emittedFiles += emitted.length
        if (ctx.argv.verbose) {
          for (const file of emitted) {
            console.log(`[emit:${emitter.name}] ${file}`)
          }
        }
      }

    // Try to use partialEmit if available, otherwise assume the output is static
    const emitFn = emitter.partialEmit ?? emitter.emit
    const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
    if (emitted === null) {
      continue
    }

    // only call the emitter if it uses this file
    if (depGraph.hasNode(fp)) {
      // re-emit using all files that are needed for the downstream of this file
      // eg. for ContentIndex, the dep graph could be:
      // a.md --> contentIndex.json
      // b.md ------^
      //
      // if a.md changes, we need to re-emit contentIndex.json,
      // and supply [a.md, b.md] to the emitter
      const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]

      const upstreamContent = upstreams
        // filter out non-markdown files
        .filter((file) => contentMap.has(file))
        // if file was deleted, don't give it to the emitter
        .filter((file) => !toRemove.has(file))
        .map((file) => contentMap.get(file)!)

      const emitted = await emitter.emit(ctx, upstreamContent, staticResources)
      if (Symbol.asyncIterator in emitted) {
        // Async generator case
        for await (const file of emitted) {
          emittedFiles++
          if (ctx.argv.verbose) {
            console.log(`[emit:${emitter.name}] ${file}`)
          }
        }
      } else {
        // Array case
        emittedFiles += emitted.length
    if (Symbol.asyncIterator in emitted) {
      // Async generator case
      for await (const file of emitted) {
        emittedFiles++
        if (ctx.argv.verbose) {
          for (const file of emitted) {
            console.log(`[emit:${emitter.name}] ${file}`)
          }
          console.log(`[emit:${emitter.name}] ${file}`)
        }
      }
    } else {
      // Array case
      emittedFiles += emitted.length
      if (ctx.argv.verbose) {
        for (const file of emitted) {
          console.log(`[emit:${emitter.name}] ${file}`)
        }
      }
    }
  }

  console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)

  // CLEANUP
  const destinationsToDelete = new Set<FilePath>()
  for (const file of toRemove) {
    // remove from cache
    contentMap.delete(file)
    Object.values(dependencies).forEach((depGraph) => {
      // remove the node from dependency graphs
      depGraph?.removeNode(file)
      // remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
      const orphanNodes = depGraph?.removeOrphanNodes()
      orphanNodes?.forEach((node) => {
        // only delete files that are in the output directory
        if (node.startsWith(argv.output)) {
          destinationsToDelete.add(node)
        }
      })
    })
  }
  await rimraf([...destinationsToDelete])

  console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))

  toRemove.clear()
  release()
  changes.splice(0, numChangesInBuild)
  clientRefresh()
}

async function rebuildFromEntrypoint(
  fp: string,
  action: FileEvent,
  clientRefresh: () => void,
  buildData: BuildData, // note: this function mutates buildData
) {
  const { ctx, ignored, mut, initialSlugs, contentMap, toRebuild, toRemove, trackedAssets } =
    buildData

  const { argv } = ctx

  // don't do anything for gitignored files
  if (ignored(fp)) {
    return
  }

  // dont bother rebuilding for non-content files, just track and refresh
  fp = toPosixPath(fp)
  const filePath = joinSegments(argv.directory, fp) as FilePath
  if (path.extname(fp) !== ".md") {
    if (action === "add" || action === "change") {
      trackedAssets.add(filePath)
    } else if (action === "delete") {
      trackedAssets.delete(filePath)
    }
    clientRefresh()
    return
  }

  if (action === "add" || action === "change") {
    toRebuild.add(filePath)
  } else if (action === "delete") {
    toRemove.add(filePath)
  }

  const buildId = randomIdNonSecure()
  ctx.buildId = buildId
  buildData.lastBuildMs = new Date().getTime()
  const release = await mut.acquire()

  // there's another build after us, release and let them do it
  if (ctx.buildId !== buildId) {
    release()
    return
  }

  const perf = new PerfTimer()
  console.log(chalk.yellow("Detected change, rebuilding..."))

  try {
    const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
    const parsedContent = await parseMarkdown(ctx, filesToRebuild)
    for (const content of parsedContent) {
      const [_tree, vfile] = content
      contentMap.set(vfile.data.filePath!, content)
    }

    for (const fp of toRemove) {
      contentMap.delete(fp)
    }

    const parsedFiles = [...contentMap.values()]
    const filteredContent = filterContent(ctx, parsedFiles)

    // re-update slugs
    const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
      .filter((fp) => !toRemove.has(fp))
      .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))

    ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]

    // TODO: we can probably traverse the link graph to figure out what's safe to delete here
    // instead of just deleting everything
    await rimraf(path.join(argv.output, ".*"), { glob: true })
    await emitContent(ctx, filteredContent)
    console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
  } catch (err) {
    console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
    if (argv.verbose) {
      console.log(chalk.red(err))
    }
  }

  clientRefresh()
  toRebuild.clear()
  toRemove.clear()
  release()
}

@@ -2,7 +2,6 @@ import { ValidDateType } from "./components/Date"
import { QuartzComponent } from "./components/types"
import { ValidLocale } from "./i18n"
import { PluginTypes } from "./plugins/types"
import { SocialImageOptions } from "./util/og"
import { Theme } from "./util/theme"

export type Analytics =

@@ -71,10 +71,10 @@ export const BuildArgv = {
    default: false,
    describe: "run a local server to live-preview your Quartz",
  },
  fastRebuild: {
  watch: {
    boolean: true,
    default: false,
    describe: "[experimental] rebuild only the changed files",
    describe: "watch for changes and rebuild automatically",
  },
  baseDir: {
    string: true,

@@ -225,6 +225,10 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
 * @param {*} argv arguments for `build`
 */
export async function handleBuild(argv) {
  if (argv.serve) {
    argv.watch = true
  }

  console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
  const ctx = await esbuild.context({
    entryPoints: [fp],
@@ -331,9 +335,10 @@ export async function handleBuild(argv) {
    clientRefresh()
  }

  let clientRefresh = () => {}
  if (argv.serve) {
    const connections = []
    const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
    clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))

    if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) {
      argv.baseDir = "/" + argv.baseDir
@@ -433,6 +438,7 @@ export async function handleBuild(argv) {

      return serve()
    })

    server.listen(argv.port)
    const wss = new WebSocketServer({ port: argv.wsPort })
    wss.on("connection", (ws) => connections.push(ws))
@@ -441,16 +447,27 @@ export async function handleBuild(argv) {
        `Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`,
      ),
    )
    console.log("hint: exit with ctrl+c")
    const paths = await globby(["**/*.ts", "**/*.tsx", "**/*.scss", "package.json"])
  } else {
    await build(clientRefresh)
    ctx.dispose()
  }

  if (argv.watch) {
    const paths = await globby([
      "**/*.ts",
      "quartz/cli/*.js",
      "quartz/static/**/*",
      "**/*.tsx",
      "**/*.scss",
      "package.json",
    ])
    chokidar
      .watch(paths, { ignoreInitial: true })
      .on("add", () => build(clientRefresh))
      .on("change", () => build(clientRefresh))
      .on("unlink", () => build(clientRefresh))
  } else {
    await build(() => {})
    ctx.dispose()

    console.log(chalk.grey("hint: exit with ctrl+c"))
  }
}

@@ -9,7 +9,6 @@ import { visit } from "unist-util-visit"
import { Root, Element, ElementContent } from "hast"
import { GlobalConfiguration } from "../cfg"
import { i18n } from "../i18n"
import { QuartzPluginData } from "../plugins/vfile"

interface RenderComponents {
  head: QuartzComponent
@@ -25,7 +24,6 @@ interface RenderComponents {
const headerRegex = new RegExp(/h[1-6]/)
export function pageResources(
  baseDir: FullSlug | RelativeURL,
  fileData: QuartzPluginData,
  staticResources: StaticResources,
): StaticResources {
  const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
@@ -65,17 +63,12 @@ export function pageResources(
  return resources
}

export function renderPage(
function renderTranscludes(
  root: Root,
  cfg: GlobalConfiguration,
  slug: FullSlug,
  componentData: QuartzComponentProps,
  components: RenderComponents,
  pageResources: StaticResources,
): string {
  // make a deep copy of the tree so we don't remove the transclusion references
  // for the file cached in contentMap in build.ts
  const root = clone(componentData.tree) as Root

) {
  // process transcludes in componentData
  visit(root, "element", (node, _index, _parent) => {
    if (node.tagName === "blockquote") {
@@ -191,6 +184,19 @@ export function renderPage(
      }
    }
  })
}

export function renderPage(
  cfg: GlobalConfiguration,
  slug: FullSlug,
  componentData: QuartzComponentProps,
  components: RenderComponents,
  pageResources: StaticResources,
): string {
  // make a deep copy of the tree so we don't remove the transclusion references
  // for the file cached in contentMap in build.ts
  const root = clone(componentData.tree) as Root
  renderTranscludes(root, cfg, slug, componentData)

  // set componentData.tree to the edited html that has transclusions rendered
  componentData.tree = root

@@ -10,7 +10,7 @@ const emitThemeChangeEvent = (theme: "light" | "dark") => {
}

document.addEventListener("nav", () => {
  const switchTheme = (e: Event) => {
  const switchTheme = () => {
    const newTheme =
      document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark"
    document.documentElement.setAttribute("saved-theme", newTheme)

@@ -1,118 +0,0 @@
import test, { describe } from "node:test"
import DepGraph from "./depgraph"
import assert from "node:assert"

describe("DepGraph", () => {
  test("getLeafNodes", () => {
    const graph = new DepGraph<string>()
    graph.addEdge("A", "B")
    graph.addEdge("B", "C")
    graph.addEdge("D", "C")
    assert.deepStrictEqual(graph.getLeafNodes("A"), new Set(["C"]))
    assert.deepStrictEqual(graph.getLeafNodes("B"), new Set(["C"]))
    assert.deepStrictEqual(graph.getLeafNodes("C"), new Set(["C"]))
    assert.deepStrictEqual(graph.getLeafNodes("D"), new Set(["C"]))
  })

  describe("getLeafNodeAncestors", () => {
    test("gets correct ancestors in a graph without cycles", () => {
      const graph = new DepGraph<string>()
      graph.addEdge("A", "B")
      graph.addEdge("B", "C")
      graph.addEdge("D", "B")
      assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "D"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "D"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "D"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "D"]))
    })

    test("gets correct ancestors in a graph with cycles", () => {
      const graph = new DepGraph<string>()
      graph.addEdge("A", "B")
      graph.addEdge("B", "C")
      graph.addEdge("C", "A")
      graph.addEdge("C", "D")
      assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "C"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "C"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "C"]))
      assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "C"]))
    })
  })

  describe("mergeGraph", () => {
    test("merges two graphs", () => {
      const graph = new DepGraph<string>()
      graph.addEdge("A.md", "A.html")

      const other = new DepGraph<string>()
      other.addEdge("B.md", "B.html")

      graph.mergeGraph(other)

      const expected = {
        nodes: ["A.md", "A.html", "B.md", "B.html"],
        edges: [
          ["A.md", "A.html"],
          ["B.md", "B.html"],
        ],
      }

      assert.deepStrictEqual(graph.export(), expected)
    })
  })

  describe("updateIncomingEdgesForNode", () => {
    test("merges when node exists", () => {
      // A.md -> B.md -> B.html
      const graph = new DepGraph<string>()
      graph.addEdge("A.md", "B.md")
      graph.addEdge("B.md", "B.html")

      // B.md is edited so it removes the A.md transclusion
      // and adds C.md transclusion
      // C.md -> B.md
      const other = new DepGraph<string>()
      other.addEdge("C.md", "B.md")
      other.addEdge("B.md", "B.html")

      // A.md -> B.md removed, C.md -> B.md added
      // C.md -> B.md -> B.html
      graph.updateIncomingEdgesForNode(other, "B.md")

      const expected = {
        nodes: ["A.md", "B.md", "B.html", "C.md"],
        edges: [
          ["B.md", "B.html"],
          ["C.md", "B.md"],
        ],
      }

      assert.deepStrictEqual(graph.export(), expected)
    })

    test("adds node if it does not exist", () => {
      // A.md -> B.md
      const graph = new DepGraph<string>()
      graph.addEdge("A.md", "B.md")

      // Add a new file C.md that transcludes B.md
      // B.md -> C.md
      const other = new DepGraph<string>()
      other.addEdge("B.md", "C.md")

      // B.md -> C.md added
      // A.md -> B.md -> C.md
      graph.updateIncomingEdgesForNode(other, "C.md")

      const expected = {
        nodes: ["A.md", "B.md", "C.md"],
        edges: [
          ["A.md", "B.md"],
          ["B.md", "C.md"],
        ],
      }

      assert.deepStrictEqual(graph.export(), expected)
    })
  })
})
@@ -1,228 +0,0 @@
export default class DepGraph<T> {
  // node: incoming and outgoing edges
  _graph = new Map<T, { incoming: Set<T>; outgoing: Set<T> }>()

  constructor() {
    this._graph = new Map()
  }

  export(): Object {
    return {
      nodes: this.nodes,
      edges: this.edges,
    }
  }

  toString(): string {
    return JSON.stringify(this.export(), null, 2)
  }

  // BASIC GRAPH OPERATIONS

  get nodes(): T[] {
    return Array.from(this._graph.keys())
  }

  get edges(): [T, T][] {
    let edges: [T, T][] = []
    this.forEachEdge((edge) => edges.push(edge))
    return edges
  }

  hasNode(node: T): boolean {
    return this._graph.has(node)
  }

  addNode(node: T): void {
    if (!this._graph.has(node)) {
      this._graph.set(node, { incoming: new Set(), outgoing: new Set() })
    }
  }

  // Remove node and all edges connected to it
  removeNode(node: T): void {
    if (this._graph.has(node)) {
      // first remove all edges so other nodes don't have references to this node
      for (const target of this._graph.get(node)!.outgoing) {
        this.removeEdge(node, target)
      }
      for (const source of this._graph.get(node)!.incoming) {
        this.removeEdge(source, node)
      }
      this._graph.delete(node)
    }
  }

  forEachNode(callback: (node: T) => void): void {
    for (const node of this._graph.keys()) {
      callback(node)
    }
  }

  hasEdge(from: T, to: T): boolean {
    return Boolean(this._graph.get(from)?.outgoing.has(to))
  }

  addEdge(from: T, to: T): void {
    this.addNode(from)
    this.addNode(to)

    this._graph.get(from)!.outgoing.add(to)
    this._graph.get(to)!.incoming.add(from)
  }

  removeEdge(from: T, to: T): void {
    if (this._graph.has(from) && this._graph.has(to)) {
      this._graph.get(from)!.outgoing.delete(to)
      this._graph.get(to)!.incoming.delete(from)
    }
  }

  // returns -1 if node does not exist
  outDegree(node: T): number {
    return this.hasNode(node) ? this._graph.get(node)!.outgoing.size : -1
  }

  // returns -1 if node does not exist
  inDegree(node: T): number {
    return this.hasNode(node) ? this._graph.get(node)!.incoming.size : -1
  }

  forEachOutNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.outgoing.forEach(callback)
  }

  forEachInNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.incoming.forEach(callback)
  }

  forEachEdge(callback: (edge: [T, T]) => void): void {
    for (const [source, { outgoing }] of this._graph.entries()) {
      for (const target of outgoing) {
        callback([source, target])
      }
    }
  }

  // DEPENDENCY ALGORITHMS

  // Add all nodes and edges from other graph to this graph
  mergeGraph(other: DepGraph<T>): void {
    other.forEachEdge(([source, target]) => {
      this.addNode(source)
      this.addNode(target)
      this.addEdge(source, target)
    })
  }

  // For the node provided:
  // If node does not exist, add it
  // If an incoming edge was added in other, it is added in this graph
  // If an incoming edge was deleted in other, it is deleted in this graph
  updateIncomingEdgesForNode(other: DepGraph<T>, node: T): void {
    this.addNode(node)

    // Add edge if it is present in other
    other.forEachInNeighbor(node, (neighbor) => {
      this.addEdge(neighbor, node)
    })

    // For node provided, remove incoming edge if it is absent in other
    this.forEachEdge(([source, target]) => {
      if (target === node && !other.hasEdge(source, target)) {
        this.removeEdge(source, target)
      }
    })
  }

  // Remove all nodes that do not have any incoming or outgoing edges
  // A node may be orphaned if the only node pointing to it was removed
  removeOrphanNodes(): Set<T> {
    let orphanNodes = new Set<T>()

    this.forEachNode((node) => {
      if (this.inDegree(node) === 0 && this.outDegree(node) === 0) {
        orphanNodes.add(node)
      }
    })

    orphanNodes.forEach((node) => {
      this.removeNode(node)
    })

    return orphanNodes
  }

  // Get all leaf nodes (i.e. destination paths) reachable from the node provided
  // Eg. if the graph is A -> B -> C
  //                     D ---^
  // and the node is B, this function returns [C]
  getLeafNodes(node: T): Set<T> {
    let stack: T[] = [node]
    let visited = new Set<T>()
    let leafNodes = new Set<T>()

    // DFS
    while (stack.length > 0) {
      let node = stack.pop()!

      // If the node is already visited, skip it
      if (visited.has(node)) {
        continue
      }
      visited.add(node)

      // Check if the node is a leaf node (i.e. destination path)
      if (this.outDegree(node) === 0) {
        leafNodes.add(node)
      }

      // Add all unvisited neighbors to the stack
      this.forEachOutNeighbor(node, (neighbor) => {
        if (!visited.has(neighbor)) {
          stack.push(neighbor)
        }
      })
    }

    return leafNodes
  }

  // Get all ancestors of the leaf nodes reachable from the node provided
  // Eg. if the graph is A -> B -> C
  //                     D ---^
  // and the node is B, this function returns [A, B, D]
  getLeafNodeAncestors(node: T): Set<T> {
    const leafNodes = this.getLeafNodes(node)
    let visited = new Set<T>()
    let upstreamNodes = new Set<T>()

    // Backwards DFS for each leaf node
    leafNodes.forEach((leafNode) => {
      let stack: T[] = [leafNode]

      while (stack.length > 0) {
        let node = stack.pop()!

        if (visited.has(node)) {
          continue
        }
        visited.add(node)
        // Add node if it's not a leaf node (i.e. destination path)
        // Assumes destination file cannot depend on another destination file
        if (this.outDegree(node) !== 0) {
          upstreamNodes.add(node)
        }

        // Add all unvisited parents to the stack
        this.forEachInNeighbor(node, (parentNode) => {
          if (!visited.has(parentNode)) {
            stack.push(parentNode)
          }
        })
      }
    })

    return upstreamNodes
  }
}
@@ -3,13 +3,12 @@ import { QuartzComponentProps } from "../../components/types"
 | 
			
		||||
import BodyConstructor from "../../components/Body"
 | 
			
		||||
import { pageResources, renderPage } from "../../components/renderPage"
 | 
			
		||||
import { FullPageLayout } from "../../cfg"
 | 
			
		||||
import { FilePath, FullSlug } from "../../util/path"
 | 
			
		||||
import { FullSlug } from "../../util/path"
 | 
			
		||||
import { sharedPageComponents } from "../../../quartz.layout"
 | 
			
		||||
import { NotFound } from "../../components"
 | 
			
		||||
import { defaultProcessedContent } from "../vfile"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import { i18n } from "../../i18n"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
 | 
			
		||||
export const NotFoundPage: QuartzEmitterPlugin = () => {
 | 
			
		||||
  const opts: FullPageLayout = {
 | 
			
		||||
@@ -28,9 +27,6 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
 | 
			
		||||
    getQuartzComponents() {
 | 
			
		||||
      return [Head, Body, pageBody, Footer]
 | 
			
		||||
    },
 | 
			
		||||
    async getDependencyGraph(_ctx, _content, _resources) {
 | 
			
		||||
      return new DepGraph<FilePath>()
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, _content, resources) {
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
      const slug = "404" as FullSlug
 | 
			
		||||
@@ -44,7 +40,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
 | 
			
		||||
        description: notFound,
 | 
			
		||||
        frontmatter: { title: notFound, tags: [] },
 | 
			
		||||
      })
 | 
			
		||||
      const externalResources = pageResources(path, vfile.data, resources)
 | 
			
		||||
      const externalResources = pageResources(path, resources)
 | 
			
		||||
      const componentData: QuartzComponentProps = {
 | 
			
		||||
        ctx,
 | 
			
		||||
        fileData: vfile.data,
 | 
			
		||||
@@ -62,5 +58,6 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
 | 
			
		||||
        ext: ".html",
 | 
			
		||||
      })
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit() {},
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -1,46 +1,47 @@
 | 
			
		||||
import { FilePath, joinSegments, resolveRelative, simplifySlug } from "../../util/path"
 | 
			
		||||
import { resolveRelative, simplifySlug } from "../../util/path"
 | 
			
		||||
import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
import { getAliasSlugs } from "../transformers/frontmatter"
 | 
			
		||||
import { BuildCtx } from "../../util/ctx"
 | 
			
		||||
import { VFile } from "vfile"
 | 
			
		||||
 | 
			
		||||
async function* processFile(ctx: BuildCtx, file: VFile) {
 | 
			
		||||
  const ogSlug = simplifySlug(file.data.slug!)
 | 
			
		||||
 | 
			
		||||
  for (const slug of file.data.aliases ?? []) {
 | 
			
		||||
    const redirUrl = resolveRelative(slug, file.data.slug!)
 | 
			
		||||
    yield write({
 | 
			
		||||
      ctx,
 | 
			
		||||
      content: `
 | 
			
		||||
        <!DOCTYPE html>
 | 
			
		||||
        <html lang="en-us">
 | 
			
		||||
        <head>
 | 
			
		||||
        <title>${ogSlug}</title>
 | 
			
		||||
        <link rel="canonical" href="${redirUrl}">
 | 
			
		||||
        <meta name="robots" content="noindex">
 | 
			
		||||
        <meta charset="utf-8">
 | 
			
		||||
        <meta http-equiv="refresh" content="0; url=${redirUrl}">
 | 
			
		||||
        </head>
 | 
			
		||||
        </html>
 | 
			
		||||
        `,
 | 
			
		||||
      slug,
 | 
			
		||||
      ext: ".html",
 | 
			
		||||
    })
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const AliasRedirects: QuartzEmitterPlugin = () => ({
 | 
			
		||||
  name: "AliasRedirects",
 | 
			
		||||
  async getDependencyGraph(ctx, content, _resources) {
 | 
			
		||||
    const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
    const { argv } = ctx
 | 
			
		||||
  async *emit(ctx, content) {
 | 
			
		||||
    for (const [_tree, file] of content) {
 | 
			
		||||
      for (const slug of getAliasSlugs(file.data.frontmatter?.aliases ?? [], argv, file)) {
 | 
			
		||||
        graph.addEdge(file.data.filePath!, joinSegments(argv.output, slug + ".html") as FilePath)
 | 
			
		||||
      }
 | 
			
		||||
      yield* processFile(ctx, file)
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    return graph
 | 
			
		||||
  },
 | 
			
		||||
  async *emit(ctx, content, _resources) {
 | 
			
		||||
    for (const [_tree, file] of content) {
 | 
			
		||||
      const ogSlug = simplifySlug(file.data.slug!)
 | 
			
		||||
 | 
			
		||||
      for (const slug of file.data.aliases ?? []) {
 | 
			
		||||
        const redirUrl = resolveRelative(slug, file.data.slug!)
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          content: `
 | 
			
		||||
            <!DOCTYPE html>
 | 
			
		||||
            <html lang="en-us">
 | 
			
		||||
            <head>
 | 
			
		||||
            <title>${ogSlug}</title>
 | 
			
		||||
            <link rel="canonical" href="${redirUrl}">
 | 
			
		||||
            <meta name="robots" content="noindex">
 | 
			
		||||
            <meta charset="utf-8">
 | 
			
		||||
            <meta http-equiv="refresh" content="0; url=${redirUrl}">
 | 
			
		||||
            </head>
 | 
			
		||||
            </html>
 | 
			
		||||
            `,
 | 
			
		||||
          slug,
 | 
			
		||||
          ext: ".html",
 | 
			
		||||
        })
 | 
			
		||||
  async *partialEmit(ctx, _content, _resources, changeEvents) {
 | 
			
		||||
    for (const changeEvent of changeEvents) {
 | 
			
		||||
      if (!changeEvent.file) continue
 | 
			
		||||
      if (changeEvent.type === "add" || changeEvent.type === "change") {
 | 
			
		||||
        // add new ones if this file still exists
 | 
			
		||||
        yield* processFile(ctx, changeEvent.file)
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  },
 | 
			
		||||
 
 | 
			
		||||
@@ -3,7 +3,6 @@ import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import path from "path"
 | 
			
		||||
import fs from "fs"
 | 
			
		||||
import { glob } from "../../util/glob"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
import { Argv } from "../../util/ctx"
 | 
			
		||||
import { QuartzConfig } from "../../cfg"
 | 
			
		||||
 | 
			
		||||
@@ -12,40 +11,41 @@ const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
 | 
			
		||||
  return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
const copyFile = async (argv: Argv, fp: FilePath) => {
 | 
			
		||||
  const src = joinSegments(argv.directory, fp) as FilePath
 | 
			
		||||
 | 
			
		||||
  const name = slugifyFilePath(fp)
 | 
			
		||||
  const dest = joinSegments(argv.output, name) as FilePath
 | 
			
		||||
 | 
			
		||||
  // ensure dir exists
 | 
			
		||||
  const dir = path.dirname(dest) as FilePath
 | 
			
		||||
  await fs.promises.mkdir(dir, { recursive: true })
 | 
			
		||||
 | 
			
		||||
  await fs.promises.copyFile(src, dest)
 | 
			
		||||
  return dest
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const Assets: QuartzEmitterPlugin = () => {
 | 
			
		||||
  return {
 | 
			
		||||
    name: "Assets",
 | 
			
		||||
    async getDependencyGraph(ctx, _content, _resources) {
 | 
			
		||||
      const { argv, cfg } = ctx
 | 
			
		||||
      const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
    async *emit({ argv, cfg }) {
 | 
			
		||||
      const fps = await filesToCopy(argv, cfg)
 | 
			
		||||
 | 
			
		||||
      for (const fp of fps) {
 | 
			
		||||
        const ext = path.extname(fp)
 | 
			
		||||
        const src = joinSegments(argv.directory, fp) as FilePath
 | 
			
		||||
        const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
 | 
			
		||||
 | 
			
		||||
        const dest = joinSegments(argv.output, name) as FilePath
 | 
			
		||||
 | 
			
		||||
        graph.addEdge(src, dest)
 | 
			
		||||
        yield copyFile(argv, fp)
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      return graph
 | 
			
		||||
    },
 | 
			
		||||
    async *emit({ argv, cfg }, _content, _resources) {
 | 
			
		||||
      const assetsPath = argv.output
 | 
			
		||||
      const fps = await filesToCopy(argv, cfg)
 | 
			
		||||
      for (const fp of fps) {
 | 
			
		||||
        const ext = path.extname(fp)
 | 
			
		||||
        const src = joinSegments(argv.directory, fp) as FilePath
 | 
			
		||||
        const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
 | 
			
		||||
    async *partialEmit(ctx, _content, _resources, changeEvents) {
 | 
			
		||||
      for (const changeEvent of changeEvents) {
 | 
			
		||||
        const ext = path.extname(changeEvent.path)
 | 
			
		||||
        if (ext === ".md") continue
 | 
			
		||||
 | 
			
		||||
        const dest = joinSegments(assetsPath, name) as FilePath
 | 
			
		||||
        const dir = path.dirname(dest) as FilePath
 | 
			
		||||
        await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
 | 
			
		||||
        await fs.promises.copyFile(src, dest)
 | 
			
		||||
        yield dest
 | 
			
		||||
        if (changeEvent.type === "add" || changeEvent.type === "change") {
 | 
			
		||||
          yield copyFile(ctx.argv, changeEvent.path)
 | 
			
		||||
        } else if (changeEvent.type === "delete") {
 | 
			
		||||
          const name = slugifyFilePath(changeEvent.path)
 | 
			
		||||
          const dest = joinSegments(ctx.argv.output, name) as FilePath
 | 
			
		||||
          await fs.promises.unlink(dest)
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
  }
 | 
			
		||||
 
 | 
			
		||||
@@ -2,7 +2,6 @@ import { FilePath, joinSegments } from "../../util/path"
 | 
			
		||||
import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import fs from "fs"
 | 
			
		||||
import chalk from "chalk"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
 | 
			
		||||
export function extractDomainFromBaseUrl(baseUrl: string) {
 | 
			
		||||
  const url = new URL(`https://${baseUrl}`)
 | 
			
		||||
@@ -11,10 +10,7 @@ export function extractDomainFromBaseUrl(baseUrl: string) {
 | 
			
		||||
 | 
			
		||||
export const CNAME: QuartzEmitterPlugin = () => ({
 | 
			
		||||
  name: "CNAME",
 | 
			
		||||
  async getDependencyGraph(_ctx, _content, _resources) {
 | 
			
		||||
    return new DepGraph<FilePath>()
 | 
			
		||||
  },
 | 
			
		||||
  async emit({ argv, cfg }, _content, _resources) {
 | 
			
		||||
  async emit({ argv, cfg }) {
 | 
			
		||||
    if (!cfg.configuration.baseUrl) {
 | 
			
		||||
      console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration"))
 | 
			
		||||
      return []
 | 
			
		||||
@@ -27,4 +23,5 @@ export const CNAME: QuartzEmitterPlugin = () => ({
 | 
			
		||||
    await fs.promises.writeFile(path, content)
 | 
			
		||||
    return [path] as FilePath[]
 | 
			
		||||
  },
 | 
			
		||||
  async *partialEmit() {},
 | 
			
		||||
})
 | 
			
		||||
 
 | 
			
		||||
@@ -1,4 +1,4 @@
 | 
			
		||||
import { FilePath, FullSlug, joinSegments } from "../../util/path"
 | 
			
		||||
import { FullSlug, joinSegments } from "../../util/path"
 | 
			
		||||
import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
 | 
			
		||||
// @ts-ignore
 | 
			
		||||
@@ -13,7 +13,6 @@ import { googleFontHref, joinStyles, processGoogleFonts } from "../../util/theme
 | 
			
		||||
import { Features, transform } from "lightningcss"
 | 
			
		||||
import { transform as transpile } from "esbuild"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
 | 
			
		||||
type ComponentResources = {
 | 
			
		||||
  css: string[]
 | 
			
		||||
@@ -203,9 +202,6 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
 | 
			
		||||
export const ComponentResources: QuartzEmitterPlugin = () => {
 | 
			
		||||
  return {
 | 
			
		||||
    name: "ComponentResources",
 | 
			
		||||
    async getDependencyGraph(_ctx, _content, _resources) {
 | 
			
		||||
      return new DepGraph<FilePath>()
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, _content, _resources) {
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
      // component specific scripts and styles
 | 
			
		||||
@@ -281,19 +277,22 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
 | 
			
		||||
          },
 | 
			
		||||
          include: Features.MediaQueries,
 | 
			
		||||
        }).code.toString(),
 | 
			
		||||
      }),
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          slug: "prescript" as FullSlug,
 | 
			
		||||
          ext: ".js",
 | 
			
		||||
          content: prescript,
 | 
			
		||||
        }),
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          slug: "postscript" as FullSlug,
 | 
			
		||||
          ext: ".js",
 | 
			
		||||
          content: postscript,
 | 
			
		||||
        })
 | 
			
		||||
      })
 | 
			
		||||
 | 
			
		||||
      yield write({
 | 
			
		||||
        ctx,
 | 
			
		||||
        slug: "prescript" as FullSlug,
 | 
			
		||||
        ext: ".js",
 | 
			
		||||
        content: prescript,
 | 
			
		||||
      })
 | 
			
		||||
 | 
			
		||||
      yield write({
 | 
			
		||||
        ctx,
 | 
			
		||||
        slug: "postscript" as FullSlug,
 | 
			
		||||
        ext: ".js",
 | 
			
		||||
        content: postscript,
 | 
			
		||||
      })
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit() {},
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -7,7 +7,6 @@ import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import { toHtml } from "hast-util-to-html"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import { i18n } from "../../i18n"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
 | 
			
		||||
export type ContentIndexMap = Map<FullSlug, ContentDetails>
 | 
			
		||||
export type ContentDetails = {
 | 
			
		||||
@@ -97,27 +96,7 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
 | 
			
		||||
  opts = { ...defaultOptions, ...opts }
 | 
			
		||||
  return {
 | 
			
		||||
    name: "ContentIndex",
 | 
			
		||||
    async getDependencyGraph(ctx, content, _resources) {
 | 
			
		||||
      const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
      for (const [_tree, file] of content) {
 | 
			
		||||
        const sourcePath = file.data.filePath!
 | 
			
		||||
 | 
			
		||||
        graph.addEdge(
 | 
			
		||||
          sourcePath,
 | 
			
		||||
          joinSegments(ctx.argv.output, "static/contentIndex.json") as FilePath,
 | 
			
		||||
        )
 | 
			
		||||
        if (opts?.enableSiteMap) {
 | 
			
		||||
          graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "sitemap.xml") as FilePath)
 | 
			
		||||
        }
 | 
			
		||||
        if (opts?.enableRSS) {
 | 
			
		||||
          graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "index.xml") as FilePath)
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      return graph
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, content, _resources) {
 | 
			
		||||
    async *emit(ctx, content) {
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
      const linkIndex: ContentIndexMap = new Map()
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
@@ -126,7 +105,7 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
 | 
			
		||||
        if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) {
 | 
			
		||||
          linkIndex.set(slug, {
 | 
			
		||||
            slug,
 | 
			
		||||
            filePath: file.data.filePath!,
 | 
			
		||||
            filePath: file.data.relativePath!,
 | 
			
		||||
            title: file.data.frontmatter?.title!,
 | 
			
		||||
            links: file.data.links ?? [],
 | 
			
		||||
            tags: file.data.frontmatter?.tags ?? [],
 | 
			
		||||
 
 | 
			
		||||
@@ -1,54 +1,48 @@
 | 
			
		||||
import path from "path"
 | 
			
		||||
import { visit } from "unist-util-visit"
 | 
			
		||||
import { Root } from "hast"
 | 
			
		||||
import { VFile } from "vfile"
 | 
			
		||||
import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import { QuartzComponentProps } from "../../components/types"
 | 
			
		||||
import HeaderConstructor from "../../components/Header"
 | 
			
		||||
import BodyConstructor from "../../components/Body"
 | 
			
		||||
import { pageResources, renderPage } from "../../components/renderPage"
 | 
			
		||||
import { FullPageLayout } from "../../cfg"
 | 
			
		||||
import { Argv } from "../../util/ctx"
 | 
			
		||||
import { FilePath, isRelativeURL, joinSegments, pathToRoot } from "../../util/path"
 | 
			
		||||
import { pathToRoot } from "../../util/path"
 | 
			
		||||
import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout"
 | 
			
		||||
import { Content } from "../../components"
 | 
			
		||||
import chalk from "chalk"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
import { BuildCtx } from "../../util/ctx"
 | 
			
		||||
import { Node } from "unist"
 | 
			
		||||
import { StaticResources } from "../../util/resources"
 | 
			
		||||
import { QuartzPluginData } from "../vfile"
 | 
			
		||||
 | 
			
		||||
// get all the dependencies for the markdown file
 | 
			
		||||
// eg. images, scripts, stylesheets, transclusions
 | 
			
		||||
const parseDependencies = (argv: Argv, hast: Root, file: VFile): string[] => {
 | 
			
		||||
  const dependencies: string[] = []
 | 
			
		||||
async function processContent(
 | 
			
		||||
  ctx: BuildCtx,
 | 
			
		||||
  tree: Node,
 | 
			
		||||
  fileData: QuartzPluginData,
 | 
			
		||||
  allFiles: QuartzPluginData[],
 | 
			
		||||
  opts: FullPageLayout,
 | 
			
		||||
  resources: StaticResources,
 | 
			
		||||
) {
 | 
			
		||||
  const slug = fileData.slug!
 | 
			
		||||
  const cfg = ctx.cfg.configuration
 | 
			
		||||
  const externalResources = pageResources(pathToRoot(slug), resources)
 | 
			
		||||
  const componentData: QuartzComponentProps = {
 | 
			
		||||
    ctx,
 | 
			
		||||
    fileData,
 | 
			
		||||
    externalResources,
 | 
			
		||||
    cfg,
 | 
			
		||||
    children: [],
 | 
			
		||||
    tree,
 | 
			
		||||
    allFiles,
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  visit(hast, "element", (elem): void => {
 | 
			
		||||
    let ref: string | null = null
 | 
			
		||||
 | 
			
		||||
    if (
 | 
			
		||||
      ["script", "img", "audio", "video", "source", "iframe"].includes(elem.tagName) &&
 | 
			
		||||
      elem?.properties?.src
 | 
			
		||||
    ) {
 | 
			
		||||
      ref = elem.properties.src.toString()
 | 
			
		||||
    } else if (["a", "link"].includes(elem.tagName) && elem?.properties?.href) {
 | 
			
		||||
      // transclusions will create a tags with relative hrefs
 | 
			
		||||
      ref = elem.properties.href.toString()
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    // if it is a relative url, its a local file and we need to add
 | 
			
		||||
    // it to the dependency graph. otherwise, ignore
 | 
			
		||||
    if (ref === null || !isRelativeURL(ref)) {
 | 
			
		||||
      return
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    let fp = path.join(file.data.filePath!, path.relative(argv.directory, ref)).replace(/\\/g, "/")
 | 
			
		||||
    // markdown files have the .md extension stripped in hrefs, add it back here
 | 
			
		||||
    if (!fp.split("/").pop()?.includes(".")) {
 | 
			
		||||
      fp += ".md"
 | 
			
		||||
    }
 | 
			
		||||
    dependencies.push(fp)
 | 
			
		||||
  const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
  return write({
 | 
			
		||||
    ctx,
 | 
			
		||||
    content,
 | 
			
		||||
    slug,
 | 
			
		||||
    ext: ".html",
 | 
			
		||||
  })
 | 
			
		||||
 | 
			
		||||
  return dependencies
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => {
 | 
			
		||||
@@ -79,57 +73,22 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
 | 
			
		||||
        Footer,
 | 
			
		||||
      ]
 | 
			
		||||
    },
 | 
			
		||||
    async getDependencyGraph(ctx, content, _resources) {
 | 
			
		||||
      const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
        const sourcePath = file.data.filePath!
 | 
			
		||||
        const slug = file.data.slug!
 | 
			
		||||
        graph.addEdge(sourcePath, joinSegments(ctx.argv.output, slug + ".html") as FilePath)
 | 
			
		||||
 | 
			
		||||
        parseDependencies(ctx.argv, tree as Root, file).forEach((dep) => {
 | 
			
		||||
          graph.addEdge(dep as FilePath, sourcePath)
 | 
			
		||||
        })
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      return graph
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, content, resources) {
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
 | 
			
		||||
      let containsIndex = false
 | 
			
		||||
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
        const slug = file.data.slug!
 | 
			
		||||
        if (slug === "index") {
 | 
			
		||||
          containsIndex = true
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        if (file.data.slug?.endsWith("/index")) {
 | 
			
		||||
          continue
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        const externalResources = pageResources(pathToRoot(slug), file.data, resources)
 | 
			
		||||
        const componentData: QuartzComponentProps = {
 | 
			
		||||
          ctx,
 | 
			
		||||
          fileData: file.data,
 | 
			
		||||
          externalResources,
 | 
			
		||||
          cfg,
 | 
			
		||||
          children: [],
 | 
			
		||||
          tree,
 | 
			
		||||
          allFiles,
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          content,
 | 
			
		||||
          slug,
 | 
			
		||||
          ext: ".html",
 | 
			
		||||
        })
 | 
			
		||||
        // only process home page, non-tag pages, and non-index pages
 | 
			
		||||
        if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
 | 
			
		||||
        yield processContent(ctx, tree, file.data, allFiles, opts, resources)
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      if (!containsIndex && !ctx.argv.fastRebuild) {
 | 
			
		||||
      if (!containsIndex) {
 | 
			
		||||
        console.log(
 | 
			
		||||
          chalk.yellow(
 | 
			
		||||
            `\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`,
 | 
			
		||||
@@ -137,5 +96,25 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
 | 
			
		||||
        )
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit(ctx, content, resources, changeEvents) {
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
 | 
			
		||||
      // find all slugs that changed or were added
 | 
			
		||||
      const changedSlugs = new Set<string>()
 | 
			
		||||
      for (const changeEvent of changeEvents) {
 | 
			
		||||
        if (!changeEvent.file) continue
 | 
			
		||||
        if (changeEvent.type === "add" || changeEvent.type === "change") {
 | 
			
		||||
          changedSlugs.add(changeEvent.file.data.slug!)
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
        const slug = file.data.slug!
 | 
			
		||||
        if (!changedSlugs.has(slug)) continue
 | 
			
		||||
        if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
 | 
			
		||||
 | 
			
		||||
        yield processContent(ctx, tree, file.data, allFiles, opts, resources)
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -7,7 +7,6 @@ import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../
 | 
			
		||||
import { FullPageLayout } from "../../cfg"
 | 
			
		||||
import path from "path"
 | 
			
		||||
import {
 | 
			
		||||
  FilePath,
 | 
			
		||||
  FullSlug,
 | 
			
		||||
  SimpleSlug,
 | 
			
		||||
  stripSlashes,
 | 
			
		||||
@@ -18,13 +17,89 @@ import {
 | 
			
		||||
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
 | 
			
		||||
import { FolderContent } from "../../components"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import { i18n } from "../../i18n"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
 | 
			
		||||
import { i18n, TRANSLATIONS } from "../../i18n"
 | 
			
		||||
import { BuildCtx } from "../../util/ctx"
 | 
			
		||||
import { StaticResources } from "../../util/resources"
 | 
			
		||||
interface FolderPageOptions extends FullPageLayout {
 | 
			
		||||
  sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
async function* processFolderInfo(
 | 
			
		||||
  ctx: BuildCtx,
 | 
			
		||||
  folderInfo: Record<SimpleSlug, ProcessedContent>,
 | 
			
		||||
  allFiles: QuartzPluginData[],
 | 
			
		||||
  opts: FullPageLayout,
 | 
			
		||||
  resources: StaticResources,
 | 
			
		||||
) {
 | 
			
		||||
  for (const [folder, folderContent] of Object.entries(folderInfo) as [
 | 
			
		||||
    SimpleSlug,
 | 
			
		||||
    ProcessedContent,
 | 
			
		||||
  ][]) {
 | 
			
		||||
    const slug = joinSegments(folder, "index") as FullSlug
 | 
			
		||||
    const [tree, file] = folderContent
 | 
			
		||||
    const cfg = ctx.cfg.configuration
 | 
			
		||||
    const externalResources = pageResources(pathToRoot(slug), resources)
 | 
			
		||||
    const componentData: QuartzComponentProps = {
 | 
			
		||||
      ctx,
 | 
			
		||||
      fileData: file.data,
 | 
			
		||||
      externalResources,
 | 
			
		||||
      cfg,
 | 
			
		||||
      children: [],
 | 
			
		||||
      tree,
 | 
			
		||||
      allFiles,
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
    yield write({
 | 
			
		||||
      ctx,
 | 
			
		||||
      content,
 | 
			
		||||
      slug,
 | 
			
		||||
      ext: ".html",
 | 
			
		||||
    })
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
function computeFolderInfo(
 | 
			
		||||
  folders: Set<SimpleSlug>,
 | 
			
		||||
  content: ProcessedContent[],
 | 
			
		||||
  locale: keyof typeof TRANSLATIONS,
 | 
			
		||||
): Record<SimpleSlug, ProcessedContent> {
 | 
			
		||||
  // Create default folder descriptions
 | 
			
		||||
  const folderInfo: Record<SimpleSlug, ProcessedContent> = Object.fromEntries(
 | 
			
		||||
    [...folders].map((folder) => [
 | 
			
		||||
      folder,
 | 
			
		||||
      defaultProcessedContent({
 | 
			
		||||
        slug: joinSegments(folder, "index") as FullSlug,
 | 
			
		||||
        frontmatter: {
 | 
			
		||||
          title: `${i18n(locale).pages.folderContent.folder}: ${folder}`,
 | 
			
		||||
          tags: [],
 | 
			
		||||
        },
 | 
			
		||||
      }),
 | 
			
		||||
    ]),
 | 
			
		||||
  )
 | 
			
		||||
 | 
			
		||||
  // Update with actual content if available
 | 
			
		||||
  for (const [tree, file] of content) {
 | 
			
		||||
    const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
 | 
			
		||||
    if (folders.has(slug)) {
 | 
			
		||||
      folderInfo[slug] = [tree, file]
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  return folderInfo
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
function _getFolders(slug: FullSlug): SimpleSlug[] {
 | 
			
		||||
  var folderName = path.dirname(slug ?? "") as SimpleSlug
 | 
			
		||||
  const parentFolderNames = [folderName]
 | 
			
		||||
 | 
			
		||||
  while (folderName !== ".") {
 | 
			
		||||
    folderName = path.dirname(folderName ?? "") as SimpleSlug
 | 
			
		||||
    parentFolderNames.push(folderName)
 | 
			
		||||
  }
 | 
			
		||||
  return parentFolderNames
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => {
 | 
			
		||||
  const opts: FullPageLayout = {
 | 
			
		||||
    ...sharedPageComponents,
 | 
			
		||||
@@ -53,22 +128,6 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
 | 
			
		||||
        Footer,
 | 
			
		||||
      ]
 | 
			
		||||
    },
 | 
			
		||||
    async getDependencyGraph(_ctx, content, _resources) {
 | 
			
		||||
      // Example graph:
 | 
			
		||||
      // nested/file.md --> nested/index.html
 | 
			
		||||
      // nested/file2.md ------^
 | 
			
		||||
      const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
      content.map(([_tree, vfile]) => {
 | 
			
		||||
        const slug = vfile.data.slug
 | 
			
		||||
        const folderName = path.dirname(slug ?? "") as SimpleSlug
 | 
			
		||||
        if (slug && folderName !== "." && folderName !== "tags") {
 | 
			
		||||
          graph.addEdge(vfile.data.filePath!, joinSegments(folderName, "index.html") as FilePath)
 | 
			
		||||
        }
 | 
			
		||||
      })
 | 
			
		||||
 | 
			
		||||
      return graph
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, content, resources) {
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
@@ -83,59 +142,29 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
 | 
			
		||||
        }),
 | 
			
		||||
      )
 | 
			
		||||
 | 
			
		||||
      const folderDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
 | 
			
		||||
        [...folders].map((folder) => [
 | 
			
		||||
          folder,
 | 
			
		||||
          defaultProcessedContent({
 | 
			
		||||
            slug: joinSegments(folder, "index") as FullSlug,
 | 
			
		||||
            frontmatter: {
 | 
			
		||||
              title: `${i18n(cfg.locale).pages.folderContent.folder}: ${folder}`,
 | 
			
		||||
              tags: [],
 | 
			
		||||
            },
 | 
			
		||||
          }),
 | 
			
		||||
        ]),
 | 
			
		||||
      )
 | 
			
		||||
      const folderInfo = computeFolderInfo(folders, content, cfg.locale)
 | 
			
		||||
      yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit(ctx, content, resources, changeEvents) {
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
        const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
 | 
			
		||||
        if (folders.has(slug)) {
 | 
			
		||||
          folderDescriptions[slug] = [tree, file]
 | 
			
		||||
        }
 | 
			
		||||
      // Find all folders that need to be updated based on changed files
 | 
			
		||||
      const affectedFolders: Set<SimpleSlug> = new Set()
 | 
			
		||||
      for (const changeEvent of changeEvents) {
 | 
			
		||||
        if (!changeEvent.file) continue
 | 
			
		||||
        const slug = changeEvent.file.data.slug!
 | 
			
		||||
        const folders = _getFolders(slug).filter(
 | 
			
		||||
          (folderName) => folderName !== "." && folderName !== "tags",
 | 
			
		||||
        )
 | 
			
		||||
        folders.forEach((folder) => affectedFolders.add(folder))
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      for (const folder of folders) {
 | 
			
		||||
        const slug = joinSegments(folder, "index") as FullSlug
 | 
			
		||||
        const [tree, file] = folderDescriptions[folder]
 | 
			
		||||
        const externalResources = pageResources(pathToRoot(slug), file.data, resources)
 | 
			
		||||
        const componentData: QuartzComponentProps = {
 | 
			
		||||
          ctx,
 | 
			
		||||
          fileData: file.data,
 | 
			
		||||
          externalResources,
 | 
			
		||||
          cfg,
 | 
			
		||||
          children: [],
 | 
			
		||||
          tree,
 | 
			
		||||
          allFiles,
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          content,
 | 
			
		||||
          slug,
 | 
			
		||||
          ext: ".html",
 | 
			
		||||
        })
 | 
			
		||||
      // If there are affected folders, rebuild their pages
 | 
			
		||||
      if (affectedFolders.size > 0) {
 | 
			
		||||
        const folderInfo = computeFolderInfo(affectedFolders, content, cfg.locale)
 | 
			
		||||
        yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
function _getFolders(slug: FullSlug): SimpleSlug[] {
 | 
			
		||||
  var folderName = path.dirname(slug ?? "") as SimpleSlug
 | 
			
		||||
  const parentFolderNames = [folderName]
 | 
			
		||||
 | 
			
		||||
  while (folderName !== ".") {
 | 
			
		||||
    folderName = path.dirname(folderName ?? "") as SimpleSlug
 | 
			
		||||
    parentFolderNames.push(folderName)
 | 
			
		||||
  }
 | 
			
		||||
  return parentFolderNames
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -4,10 +4,12 @@ import { unescapeHTML } from "../../util/escape"
 | 
			
		||||
import { FullSlug, getFileExtension } from "../../util/path"
 | 
			
		||||
import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og"
 | 
			
		||||
import sharp from "sharp"
 | 
			
		||||
import satori from "satori"
 | 
			
		||||
import satori, { SatoriOptions } from "satori"
 | 
			
		||||
import { loadEmoji, getIconCode } from "../../util/emoji"
 | 
			
		||||
import { Readable } from "stream"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import { BuildCtx } from "../../util/ctx"
 | 
			
		||||
import { QuartzPluginData } from "../vfile"
 | 
			
		||||
 | 
			
		||||
const defaultOptions: SocialImageOptions = {
 | 
			
		||||
  colorScheme: "lightMode",
 | 
			
		||||
@@ -42,6 +44,41 @@ async function generateSocialImage(
 | 
			
		||||
  return sharp(Buffer.from(svg)).webp({ quality: 40 })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
async function processOgImage(
 | 
			
		||||
  ctx: BuildCtx,
 | 
			
		||||
  fileData: QuartzPluginData,
 | 
			
		||||
  fonts: SatoriOptions["fonts"],
 | 
			
		||||
  fullOptions: SocialImageOptions,
 | 
			
		||||
) {
 | 
			
		||||
  const cfg = ctx.cfg.configuration
 | 
			
		||||
  const slug = fileData.slug!
 | 
			
		||||
  const titleSuffix = cfg.pageTitleSuffix ?? ""
 | 
			
		||||
  const title =
 | 
			
		||||
    (fileData.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
 | 
			
		||||
  const description =
 | 
			
		||||
    fileData.frontmatter?.socialDescription ??
 | 
			
		||||
    fileData.frontmatter?.description ??
 | 
			
		||||
    unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)
 | 
			
		||||
 | 
			
		||||
  const stream = await generateSocialImage(
 | 
			
		||||
    {
 | 
			
		||||
      title,
 | 
			
		||||
      description,
 | 
			
		||||
      fonts,
 | 
			
		||||
      cfg,
 | 
			
		||||
      fileData,
 | 
			
		||||
    },
 | 
			
		||||
    fullOptions,
 | 
			
		||||
  )
 | 
			
		||||
 | 
			
		||||
  return write({
 | 
			
		||||
    ctx,
 | 
			
		||||
    content: stream,
 | 
			
		||||
    slug: `${slug}-og-image` as FullSlug,
 | 
			
		||||
    ext: ".webp",
 | 
			
		||||
  })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const CustomOgImagesEmitterName = "CustomOgImages"
 | 
			
		||||
export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => {
 | 
			
		||||
  const fullOptions = { ...defaultOptions, ...userOpts }
 | 
			
		||||
@@ -58,39 +95,23 @@ export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> =
 | 
			
		||||
      const fonts = await getSatoriFonts(headerFont, bodyFont)
 | 
			
		||||
 | 
			
		||||
      for (const [_tree, vfile] of content) {
 | 
			
		||||
        // if this file defines socialImage, we can skip
 | 
			
		||||
        if (vfile.data.frontmatter?.socialImage !== undefined) {
 | 
			
		||||
          continue
 | 
			
		||||
        if (vfile.data.frontmatter?.socialImage !== undefined) continue
 | 
			
		||||
        yield processOgImage(ctx, vfile.data, fonts, fullOptions)
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit(ctx, _content, _resources, changeEvents) {
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
      const headerFont = cfg.theme.typography.header
 | 
			
		||||
      const bodyFont = cfg.theme.typography.body
 | 
			
		||||
      const fonts = await getSatoriFonts(headerFont, bodyFont)
 | 
			
		||||
 | 
			
		||||
      // find all slugs that changed or were added
 | 
			
		||||
      for (const changeEvent of changeEvents) {
 | 
			
		||||
        if (!changeEvent.file) continue
 | 
			
		||||
        if (changeEvent.file.data.frontmatter?.socialImage !== undefined) continue
 | 
			
		||||
        if (changeEvent.type === "add" || changeEvent.type === "change") {
 | 
			
		||||
          yield processOgImage(ctx, changeEvent.file.data, fonts, fullOptions)
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        const slug = vfile.data.slug!
 | 
			
		||||
        const titleSuffix = cfg.pageTitleSuffix ?? ""
 | 
			
		||||
        const title =
 | 
			
		||||
          (vfile.data.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
 | 
			
		||||
        const description =
 | 
			
		||||
          vfile.data.frontmatter?.socialDescription ??
 | 
			
		||||
          vfile.data.frontmatter?.description ??
 | 
			
		||||
          unescapeHTML(
 | 
			
		||||
            vfile.data.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description,
 | 
			
		||||
          )
 | 
			
		||||
 | 
			
		||||
        const stream = await generateSocialImage(
 | 
			
		||||
          {
 | 
			
		||||
            title,
 | 
			
		||||
            description,
 | 
			
		||||
            fonts,
 | 
			
		||||
            cfg,
 | 
			
		||||
            fileData: vfile.data,
 | 
			
		||||
          },
 | 
			
		||||
          fullOptions,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          content: stream,
 | 
			
		||||
          slug: `${slug}-og-image` as FullSlug,
 | 
			
		||||
          ext: ".webp",
 | 
			
		||||
        })
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    externalResources: (ctx) => {
 | 
			
		||||
 
 | 
			
		||||
@@ -2,26 +2,11 @@ import { FilePath, QUARTZ, joinSegments } from "../../util/path"
 | 
			
		||||
import { QuartzEmitterPlugin } from "../types"
 | 
			
		||||
import fs from "fs"
 | 
			
		||||
import { glob } from "../../util/glob"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
import { dirname } from "path"
 | 
			
		||||
 | 
			
		||||
export const Static: QuartzEmitterPlugin = () => ({
 | 
			
		||||
  name: "Static",
 | 
			
		||||
  async getDependencyGraph({ argv, cfg }, _content, _resources) {
 | 
			
		||||
    const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
    const staticPath = joinSegments(QUARTZ, "static")
 | 
			
		||||
    const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
 | 
			
		||||
    for (const fp of fps) {
 | 
			
		||||
      graph.addEdge(
 | 
			
		||||
        joinSegments("static", fp) as FilePath,
 | 
			
		||||
        joinSegments(argv.output, "static", fp) as FilePath,
 | 
			
		||||
      )
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    return graph
 | 
			
		||||
  },
 | 
			
		||||
  async *emit({ argv, cfg }, _content) {
 | 
			
		||||
  async *emit({ argv, cfg }) {
 | 
			
		||||
    const staticPath = joinSegments(QUARTZ, "static")
 | 
			
		||||
    const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
 | 
			
		||||
    const outputStaticPath = joinSegments(argv.output, "static")
 | 
			
		||||
@@ -34,4 +19,5 @@ export const Static: QuartzEmitterPlugin = () => ({
 | 
			
		||||
      yield dest
 | 
			
		||||
    }
 | 
			
		||||
  },
 | 
			
		||||
  async *partialEmit() {},
 | 
			
		||||
})
 | 
			
		||||
 
 | 
			
		||||
@@ -5,23 +5,94 @@ import BodyConstructor from "../../components/Body"
 | 
			
		||||
import { pageResources, renderPage } from "../../components/renderPage"
 | 
			
		||||
import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile"
 | 
			
		||||
import { FullPageLayout } from "../../cfg"
 | 
			
		||||
import {
 | 
			
		||||
  FilePath,
 | 
			
		||||
  FullSlug,
 | 
			
		||||
  getAllSegmentPrefixes,
 | 
			
		||||
  joinSegments,
 | 
			
		||||
  pathToRoot,
 | 
			
		||||
} from "../../util/path"
 | 
			
		||||
import { FullSlug, getAllSegmentPrefixes, joinSegments, pathToRoot } from "../../util/path"
 | 
			
		||||
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
 | 
			
		||||
import { TagContent } from "../../components"
 | 
			
		||||
import { write } from "./helpers"
 | 
			
		||||
import { i18n } from "../../i18n"
 | 
			
		||||
import DepGraph from "../../depgraph"
 | 
			
		||||
import { i18n, TRANSLATIONS } from "../../i18n"
 | 
			
		||||
import { BuildCtx } from "../../util/ctx"
 | 
			
		||||
import { StaticResources } from "../../util/resources"
 | 
			
		||||
 | 
			
		||||
interface TagPageOptions extends FullPageLayout {
 | 
			
		||||
  sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
function computeTagInfo(
 | 
			
		||||
  allFiles: QuartzPluginData[],
 | 
			
		||||
  content: ProcessedContent[],
 | 
			
		||||
  locale: keyof typeof TRANSLATIONS,
 | 
			
		||||
): [Set<string>, Record<string, ProcessedContent>] {
 | 
			
		||||
  const tags: Set<string> = new Set(
 | 
			
		||||
    allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
 | 
			
		||||
  )
 | 
			
		||||
 | 
			
		||||
  // add base tag
 | 
			
		||||
  tags.add("index")
 | 
			
		||||
 | 
			
		||||
  const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
 | 
			
		||||
    [...tags].map((tag) => {
 | 
			
		||||
      const title =
 | 
			
		||||
        tag === "index"
 | 
			
		||||
          ? i18n(locale).pages.tagContent.tagIndex
 | 
			
		||||
          : `${i18n(locale).pages.tagContent.tag}: ${tag}`
 | 
			
		||||
      return [
 | 
			
		||||
        tag,
 | 
			
		||||
        defaultProcessedContent({
 | 
			
		||||
          slug: joinSegments("tags", tag) as FullSlug,
 | 
			
		||||
          frontmatter: { title, tags: [] },
 | 
			
		||||
        }),
 | 
			
		||||
      ]
 | 
			
		||||
    }),
 | 
			
		||||
  )
 | 
			
		||||
 | 
			
		||||
  // Update with actual content if available
 | 
			
		||||
  for (const [tree, file] of content) {
 | 
			
		||||
    const slug = file.data.slug!
 | 
			
		||||
    if (slug.startsWith("tags/")) {
 | 
			
		||||
      const tag = slug.slice("tags/".length)
 | 
			
		||||
      if (tags.has(tag)) {
 | 
			
		||||
        tagDescriptions[tag] = [tree, file]
 | 
			
		||||
        if (file.data.frontmatter?.title === tag) {
 | 
			
		||||
          file.data.frontmatter.title = `${i18n(locale).pages.tagContent.tag}: ${tag}`
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  return [tags, tagDescriptions]
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
async function processTagPage(
 | 
			
		||||
  ctx: BuildCtx,
 | 
			
		||||
  tag: string,
 | 
			
		||||
  tagContent: ProcessedContent,
 | 
			
		||||
  allFiles: QuartzPluginData[],
 | 
			
		||||
  opts: FullPageLayout,
 | 
			
		||||
  resources: StaticResources,
 | 
			
		||||
) {
 | 
			
		||||
  const slug = joinSegments("tags", tag) as FullSlug
 | 
			
		||||
  const [tree, file] = tagContent
 | 
			
		||||
  const cfg = ctx.cfg.configuration
 | 
			
		||||
  const externalResources = pageResources(pathToRoot(slug), resources)
 | 
			
		||||
  const componentData: QuartzComponentProps = {
 | 
			
		||||
    ctx,
 | 
			
		||||
    fileData: file.data,
 | 
			
		||||
    externalResources,
 | 
			
		||||
    cfg,
 | 
			
		||||
    children: [],
 | 
			
		||||
    tree,
 | 
			
		||||
    allFiles,
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
  return write({
 | 
			
		||||
    ctx,
 | 
			
		||||
    content,
 | 
			
		||||
    slug: file.data.slug!,
 | 
			
		||||
    ext: ".html",
 | 
			
		||||
  })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => {
 | 
			
		||||
  const opts: FullPageLayout = {
 | 
			
		||||
    ...sharedPageComponents,
 | 
			
		||||
@@ -50,88 +121,49 @@ export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts)
 | 
			
		||||
        Footer,
 | 
			
		||||
      ]
 | 
			
		||||
    },
 | 
			
		||||
    async getDependencyGraph(ctx, content, _resources) {
 | 
			
		||||
      const graph = new DepGraph<FilePath>()
 | 
			
		||||
 | 
			
		||||
      for (const [_tree, file] of content) {
 | 
			
		||||
        const sourcePath = file.data.filePath!
 | 
			
		||||
        const tags = (file.data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes)
 | 
			
		||||
        // if the file has at least one tag, it is used in the tag index page
 | 
			
		||||
        if (tags.length > 0) {
 | 
			
		||||
          tags.push("index")
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        for (const tag of tags) {
 | 
			
		||||
          graph.addEdge(
 | 
			
		||||
            sourcePath,
 | 
			
		||||
            joinSegments(ctx.argv.output, "tags", tag + ".html") as FilePath,
 | 
			
		||||
          )
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      return graph
 | 
			
		||||
    },
 | 
			
		||||
    async *emit(ctx, content, resources) {
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
 | 
			
		||||
      const tags: Set<string> = new Set(
 | 
			
		||||
        allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
 | 
			
		||||
      )
 | 
			
		||||
 | 
			
		||||
      // add base tag
 | 
			
		||||
      tags.add("index")
 | 
			
		||||
 | 
			
		||||
      const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
 | 
			
		||||
        [...tags].map((tag) => {
 | 
			
		||||
          const title =
 | 
			
		||||
            tag === "index"
 | 
			
		||||
              ? i18n(cfg.locale).pages.tagContent.tagIndex
 | 
			
		||||
              : `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
 | 
			
		||||
          return [
 | 
			
		||||
            tag,
 | 
			
		||||
            defaultProcessedContent({
 | 
			
		||||
              slug: joinSegments("tags", tag) as FullSlug,
 | 
			
		||||
              frontmatter: { title, tags: [] },
 | 
			
		||||
            }),
 | 
			
		||||
          ]
 | 
			
		||||
        }),
 | 
			
		||||
      )
 | 
			
		||||
 | 
			
		||||
      for (const [tree, file] of content) {
 | 
			
		||||
        const slug = file.data.slug!
 | 
			
		||||
        if (slug.startsWith("tags/")) {
 | 
			
		||||
          const tag = slug.slice("tags/".length)
 | 
			
		||||
          if (tags.has(tag)) {
 | 
			
		||||
            tagDescriptions[tag] = [tree, file]
 | 
			
		||||
            if (file.data.frontmatter?.title === tag) {
 | 
			
		||||
              file.data.frontmatter.title = `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
 | 
			
		||||
            }
 | 
			
		||||
          }
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      const [tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
 | 
			
		||||
 | 
			
		||||
      for (const tag of tags) {
 | 
			
		||||
        const slug = joinSegments("tags", tag) as FullSlug
 | 
			
		||||
        const [tree, file] = tagDescriptions[tag]
 | 
			
		||||
        const externalResources = pageResources(pathToRoot(slug), file.data, resources)
 | 
			
		||||
        const componentData: QuartzComponentProps = {
 | 
			
		||||
          ctx,
 | 
			
		||||
          fileData: file.data,
 | 
			
		||||
          externalResources,
 | 
			
		||||
          cfg,
 | 
			
		||||
          children: [],
 | 
			
		||||
          tree,
 | 
			
		||||
          allFiles,
 | 
			
		||||
        yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    async *partialEmit(ctx, content, resources, changeEvents) {
 | 
			
		||||
      const allFiles = content.map((c) => c[1].data)
 | 
			
		||||
      const cfg = ctx.cfg.configuration
 | 
			
		||||
 | 
			
		||||
      // Find all tags that need to be updated based on changed files
 | 
			
		||||
      const affectedTags: Set<string> = new Set()
 | 
			
		||||
      for (const changeEvent of changeEvents) {
 | 
			
		||||
        if (!changeEvent.file) continue
 | 
			
		||||
        const slug = changeEvent.file.data.slug!
 | 
			
		||||
 | 
			
		||||
        // If it's a tag page itself that changed
 | 
			
		||||
        if (slug.startsWith("tags/")) {
 | 
			
		||||
          const tag = slug.slice("tags/".length)
 | 
			
		||||
          affectedTags.add(tag)
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        const content = renderPage(cfg, slug, componentData, opts, externalResources)
 | 
			
		||||
        yield write({
 | 
			
		||||
          ctx,
 | 
			
		||||
          content,
 | 
			
		||||
          slug: file.data.slug!,
 | 
			
		||||
          ext: ".html",
 | 
			
		||||
        })
 | 
			
		||||
        // If a file with tags changed, we need to update those tag pages
 | 
			
		||||
        const fileTags = changeEvent.file.data.frontmatter?.tags ?? []
 | 
			
		||||
        fileTags.flatMap(getAllSegmentPrefixes).forEach((tag) => affectedTags.add(tag))
 | 
			
		||||
 | 
			
		||||
        // Always update the index tag page if any file changes
 | 
			
		||||
        affectedTags.add("index")
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      // If there are affected tags, rebuild their pages
 | 
			
		||||
      if (affectedTags.size > 0) {
 | 
			
		||||
        // We still need to compute all tags because tag pages show all tags
 | 
			
		||||
        const [_tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
 | 
			
		||||
 | 
			
		||||
        for (const tag of affectedTags) {
 | 
			
		||||
          if (tagDescriptions[tag]) {
 | 
			
		||||
            yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
 | 
			
		||||
          }
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
  }
 | 
			
		||||
 
 | 
			
		||||
@@ -3,12 +3,9 @@ import remarkFrontmatter from "remark-frontmatter"
import { QuartzTransformerPlugin } from "../types"
import yaml from "js-yaml"
import toml from "toml"
import { FilePath, FullSlug, joinSegments, slugifyFilePath, slugTag } from "../../util/path"
import { FilePath, FullSlug, getFileExtension, slugifyFilePath, slugTag } from "../../util/path"
import { QuartzPluginData } from "../vfile"
import { i18n } from "../../i18n"
import { Argv } from "../../util/ctx"
import { VFile } from "vfile"
import path from "path"

export interface Options {
  delimiters: string | [string, string]
@@ -43,26 +40,24 @@ function coerceToArray(input: string | string[]): string[] | undefined {
    .map((tag: string | number) => tag.toString())
}

export function getAliasSlugs(aliases: string[], argv: Argv, file: VFile): FullSlug[] {
  const dir = path.posix.relative(argv.directory, path.dirname(file.data.filePath!))
  const slugs: FullSlug[] = aliases.map(
    (alias) => path.posix.join(dir, slugifyFilePath(alias as FilePath)) as FullSlug,
  )
  const permalink = file.data.frontmatter?.permalink
  if (typeof permalink === "string") {
    slugs.push(permalink as FullSlug)
function getAliasSlugs(aliases: string[]): FullSlug[] {
  const res: FullSlug[] = []
  for (const alias of aliases) {
    const isMd = getFileExtension(alias) === "md"
    const mockFp = isMd ? alias : alias + ".md"
    const slug = slugifyFilePath(mockFp as FilePath)
    res.push(slug)
  }
  // fix any slugs that have trailing slash
  return slugs.map((slug) =>
    slug.endsWith("/") ? (joinSegments(slug, "index") as FullSlug) : slug,
  )

  return res
}

export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => {
  const opts = { ...defaultOptions, ...userOpts }
  return {
    name: "FrontMatter",
    markdownPlugins({ cfg, allSlugs, argv }) {
    markdownPlugins(ctx) {
      const { cfg, allSlugs } = ctx
      return [
        [remarkFrontmatter, ["yaml", "toml"]],
        () => {
@@ -88,9 +83,18 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
            const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"]))
            if (aliases) {
              data.aliases = aliases // frontmatter
              const slugs = (file.data.aliases = getAliasSlugs(aliases, argv, file))
              allSlugs.push(...slugs)
              file.data.aliases = getAliasSlugs(aliases)
              allSlugs.push(...file.data.aliases)
            }

            if (data.permalink != null && data.permalink.toString() !== "") {
              data.permalink = data.permalink.toString() as FullSlug
              const aliases = file.data.aliases ?? []
              aliases.push(data.permalink)
              file.data.aliases = aliases
              allSlugs.push(data.permalink)
            }

            const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"]))
            if (cssclasses) data.cssclasses = cssclasses

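Read on its own, the reworked `getAliasSlugs` just slugifies each alias as if it were a standalone markdown path; it no longer resolves aliases against the note's directory here, and the `permalink` handling has moved into the plugin body above. A rough, self-contained illustration, where the `*Stub` helpers only approximate the real `getFileExtension` and `slugifyFilePath` from `quartz/util/path.ts`:

```ts
// Sketch only: the *Stub functions approximate the real helpers from quartz/util/path.ts.
type FilePath = string
type FullSlug = string

const getFileExtensionStub = (p: string): string | undefined => p.split(".").pop()
const slugifyFilePathStub = (fp: FilePath): FullSlug =>
  fp
    .replace(/\.md$/, "")
    .split("/")
    .map((segment) => segment.trim().replace(/\s+/g, "-"))
    .join("/") as FullSlug

function getAliasSlugsSketch(aliases: string[]): FullSlug[] {
  const res: FullSlug[] = []
  for (const alias of aliases) {
    // treat every alias as if it were a markdown file, then slugify it
    const isMd = getFileExtensionStub(alias) === "md"
    const mockFp = isMd ? alias : alias + ".md"
    res.push(slugifyFilePathStub(mockFp as FilePath))
  }
  return res
}

console.log(getAliasSlugsSketch(["My Alias", "notes/old name.md"]))
// -> [ "My-Alias", "notes/old-name" ]
```
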
@@ -31,7 +31,7 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
  const opts = { ...defaultOptions, ...userOpts }
  return {
    name: "CreatedModifiedDate",
    markdownPlugins() {
    markdownPlugins(ctx) {
      return [
        () => {
          let repo: Repository | undefined = undefined
@@ -40,8 +40,8 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
            let modified: MaybeDate = undefined
            let published: MaybeDate = undefined

            const fp = file.data.filePath!
            const fullFp = path.isAbsolute(fp) ? fp : path.posix.join(file.cwd, fp)
            const fp = file.data.relativePath!
            const fullFp = path.posix.join(ctx.argv.directory, fp)
            for (const source of opts.priority) {
              if (source === "filesystem") {
                const st = await fs.promises.stat(fullFp)
@@ -56,11 +56,11 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
                  // Get a reference to the main git repo.
                  // It's either the same as the workdir,
                  // or 1+ level higher in case of a submodule/subtree setup
                  repo = Repository.discover(file.cwd)
                  repo = Repository.discover(ctx.argv.directory)
                }

                try {
                  modified ||= await repo.getFileLatestModifiedDateAsync(file.data.filePath!)
                  modified ||= await repo.getFileLatestModifiedDateAsync(fullFp)
                } catch {
                  console.log(
                    chalk.yellow(

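For concreteness, the new `fullFp` is simply the content directory from `argv` joined with the vfile's content-relative path. A tiny rendering with made-up values:

```ts
// Illustrative only: the directory and relative path values are made up.
import path from "path"

const directory = "content" // ctx.argv.directory
const relativePath = "notes/idea.md" // file.data.relativePath
const fullFp = path.posix.join(directory, relativePath)
console.log(fullFp) // "content/notes/idea.md"
```
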
@@ -54,7 +54,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
    textTransform(_ctx, src) {
      if (opts.wikilinks) {
        src = src.toString()
        src = src.replaceAll(relrefRegex, (value, ...capture) => {
        src = src.replaceAll(relrefRegex, (_value, ...capture) => {
          const [text, link] = capture
          return `[${text}](${link})`
        })
@@ -62,7 +62,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

      if (opts.removePredefinedAnchor) {
        src = src.toString()
        src = src.replaceAll(predefinedHeadingIdRegex, (value, ...capture) => {
        src = src.replaceAll(predefinedHeadingIdRegex, (_value, ...capture) => {
          const [headingText] = capture
          return headingText
        })
@@ -70,7 +70,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

      if (opts.removeHugoShortcode) {
        src = src.toString()
        src = src.replaceAll(hugoShortcodeRegex, (value, ...capture) => {
        src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => {
          const [scContent] = capture
          return scContent
        })
@@ -78,7 +78,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

      if (opts.replaceFigureWithMdImg) {
        src = src.toString()
        src = src.replaceAll(figureTagRegex, (value, ...capture) => {
        src = src.replaceAll(figureTagRegex, (_value, ...capture) => {
          const [src] = capture
          return `![](${src})`
        })
@@ -86,11 +86,11 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

      if (opts.replaceOrgLatex) {
        src = src.toString()
        src = src.replaceAll(inlineLatexRegex, (value, ...capture) => {
        src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => {
          const [eqn] = capture
          return `$${eqn}$`
        })
        src = src.replaceAll(blockLatexRegex, (value, ...capture) => {
        src = src.replaceAll(blockLatexRegex, (_value, ...capture) => {
          const [eqn] = capture
          return `$$${eqn}$$`
        })

@@ -1,10 +1,8 @@
import { QuartzTransformerPlugin } from "../types"
import { PluggableList } from "unified"
import { SKIP, visit } from "unist-util-visit"
import { visit } from "unist-util-visit"
import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace"
import { Root, Html, Paragraph, Text, Link, Parent } from "mdast"
import { Node } from "unist"
import { VFile } from "vfile"
import { BuildVisitor } from "unist-util-visit"

export interface Options {
@@ -34,21 +32,10 @@ const defaultOptions: Options = {
const orRegex = new RegExp(/{{or:(.*?)}}/, "g")
const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g")
const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g")
const videoRegex = new RegExp(/{{.*?\[\[video\]\].*?\:(.*?)}}/, "g")
const youtubeRegex = new RegExp(
  /{{.*?\[\[video\]\].*?(https?:\/\/(?:www\.)?youtu(?:be\.com\/watch\?v=|\.be\/)([\w\-\_]*)(&(amp;)?[\w\?=]*)?)}}/,
  "g",
)

// const multimediaRegex = new RegExp(/{{.*?\b(video|audio)\b.*?\:(.*?)}}/, "g")

const audioRegex = new RegExp(/{{.*?\[\[audio\]\].*?\:(.*?)}}/, "g")
const pdfRegex = new RegExp(/{{.*?\[\[pdf\]\].*?\:(.*?)}}/, "g")
const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g")
const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g")
const roamItalicRegex = new RegExp(/__(.+)__/, "g")
const tableRegex = new RegExp(/- {{.*?\btable\b.*?}}/, "g") /* TODO */
const attributeRegex = new RegExp(/\b\w+(?:\s+\w+)*::/, "g") /* TODO */

function isSpecialEmbed(node: Paragraph): boolean {
  if (node.children.length !== 2) return false
@@ -135,7 +122,7 @@ export const RoamFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | un
      const plugins: PluggableList = []

      plugins.push(() => {
        return (tree: Root, file: VFile) => {
        return (tree: Root) => {
          const replacements: [RegExp, ReplaceFunction][] = []

          // Handle special embeds (audio, video, PDF)

@@ -4,7 +4,7 @@ import { ProcessedContent } from "./vfile"
import { QuartzComponent } from "../components/types"
import { FilePath } from "../util/path"
import { BuildCtx } from "../util/ctx"
import DepGraph from "../depgraph"
import { VFile } from "vfile"

export interface PluginTypes {
  transformers: QuartzTransformerPluginInstance[]
@@ -33,26 +33,33 @@ export type QuartzFilterPluginInstance = {
  shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean
}

export type ChangeEvent = {
  type: "add" | "change" | "delete"
  path: FilePath
  file?: VFile
}

export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
  opts?: Options,
) => QuartzEmitterPluginInstance
export type QuartzEmitterPluginInstance = {
  name: string
  emit(
  emit: (
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
  ): Promise<FilePath[]> | AsyncGenerator<FilePath>
  ) => Promise<FilePath[]> | AsyncGenerator<FilePath>
  partialEmit?: (
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
    changeEvents: ChangeEvent[],
  ) => Promise<FilePath[]> | AsyncGenerator<FilePath> | null
  /**
   * Returns the components (if any) that are used in rendering the page.
   * This helps Quartz optimize the page by only including necessary resources
   * for components that are actually used.
   */
  getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[]
  getDependencyGraph?(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
  ): Promise<DepGraph<FilePath>>
  externalResources?: ExternalResourcesFn
}

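The `ChangeEvent` array is what `partialEmit` keys off. A minimal sketch of an emitter that only re-renders changed notes against the shapes above; the type aliases, `writeStub`, and the HTML placeholder are simplified stand-ins rather than Quartz's real definitions or its `write` helper:

```ts
// Hypothetical emitter sketch; not code from this commit.
type FilePath = string
type BuildCtx = { argv: { output: string } }
type ProcessedContent = [unknown, { data: { filePath?: FilePath; slug?: string } }]
type ChangeEvent = { type: "add" | "change" | "delete"; path: FilePath; file?: unknown }

async function writeStub(ctx: BuildCtx, slug: string, html: string): Promise<FilePath> {
  // stand-in for the real write helper in quartz/plugins/emitters/helpers.ts
  return `${ctx.argv.output}/${slug}.html`
}

const ContentPageSketch = {
  name: "ContentPageSketch",
  async *partialEmit(
    ctx: BuildCtx,
    content: ProcessedContent[],
    _resources: unknown,
    changeEvents: ChangeEvent[],
  ): AsyncGenerator<FilePath> {
    // only re-render files that were added or changed; deletions are assumed
    // to be cleaned up elsewhere
    const changed = new Set(changeEvents.filter((ev) => ev.type !== "delete").map((ev) => ev.path))
    for (const [, vfile] of content) {
      if (vfile.data.filePath && changed.has(vfile.data.filePath)) {
        yield await writeStub(ctx, vfile.data.slug ?? "index", "<html>…</html>")
      }
    }
  },
}

void ContentPageSketch
```
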
@@ -11,7 +11,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
  const perf = new PerfTimer()
  const log = new QuartzLogger(ctx.argv.verbose)

  log.start(`Emitting output files`)
  log.start(`Emitting files`)

  let emittedFiles = 0
  const staticResources = getStaticResourcesFromPlugins(ctx)
@@ -26,7 +26,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
            if (ctx.argv.verbose) {
              console.log(`[emit:${emitter.name}] ${file}`)
            } else {
              log.updateText(`Emitting output files: ${emitter.name} -> ${chalk.gray(file)}`)
              log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
            }
          }
        } else {
@@ -36,7 +36,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
            if (ctx.argv.verbose) {
              console.log(`[emit:${emitter.name}] ${file}`)
            } else {
              log.updateText(`Emitting output files: ${emitter.name} -> ${chalk.gray(file)}`)
              log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
            }
          }
        }

@@ -7,12 +7,13 @@ import { Root as HTMLRoot } from "hast"
import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
import { PerfTimer } from "../util/perf"
import { read } from "to-vfile"
import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path"
import { FilePath, QUARTZ, slugifyFilePath } from "../util/path"
import path from "path"
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzLogger } from "../util/log"
import { trace } from "../util/trace"
import { BuildCtx } from "../util/ctx"
import { BuildCtx, WorkerSerializableBuildCtx } from "../util/ctx"
import chalk from "chalk"

export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot>
export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot>
@@ -175,21 +176,42 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
      process.exit(1)
    }

    const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = []
    for (const chunk of chunks(fps, CHUNK_SIZE)) {
      mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk]))
    const serializableCtx: WorkerSerializableBuildCtx = {
      buildId: ctx.buildId,
      argv: ctx.argv,
      allSlugs: ctx.allSlugs,
      allFiles: ctx.allFiles,
      incremental: ctx.incremental,
    }
    const mdResults: [MarkdownContent[], FullSlug[]][] =
      await WorkerPromise.all(mdPromises).catch(errorHandler)

    const childPromises: WorkerPromise<ProcessedContent[]>[] = []
    for (const [_, extraSlugs] of mdResults) {
      ctx.allSlugs.push(...extraSlugs)
    const textToMarkdownPromises: WorkerPromise<MarkdownContent[]>[] = []
    let processedFiles = 0
    for (const chunk of chunks(fps, CHUNK_SIZE)) {
      textToMarkdownPromises.push(pool.exec("parseMarkdown", [serializableCtx, chunk]))
    }

    const mdResults: Array<MarkdownContent[]> = await Promise.all(
      textToMarkdownPromises.map(async (promise) => {
        const result = await promise
        processedFiles += result.length
        log.updateText(`text->markdown ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
        return result
      }),
    ).catch(errorHandler)

    const markdownToHtmlPromises: WorkerPromise<ProcessedContent[]>[] = []
    processedFiles = 0
    for (const [mdChunk, _] of mdResults) {
      childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs]))
      markdownToHtmlPromises.push(pool.exec("processHtml", [serializableCtx, mdChunk]))
    }
    const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler)
    const results: ProcessedContent[][] = await Promise.all(
      markdownToHtmlPromises.map(async (promise) => {
        const result = await promise
        processedFiles += result.length
        log.updateText(`markdown->html ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
        return result
      }),
    ).catch(errorHandler)

    res = results.flat()
    await pool.terminate()

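The same progress-reporting wrapper shows up in both stages of `parseMarkdown` above (text→markdown, then markdown→html): a shared counter advances as each chunk promise settles. A stripped-down, generic version of that pattern, not taken from this commit:

```ts
// Generic sketch of the per-chunk progress pattern used above.
export async function withProgress<T>(
  chunkPromises: Promise<T[]>[],
  report: (done: number) => void,
): Promise<T[][]> {
  let done = 0
  return Promise.all(
    chunkPromises.map(async (chunk) => {
      const result = await chunk
      done += result.length // count items rather than chunks, as parseMarkdown does
      report(done)
      return result
    }),
  )
}

// usage sketch: await withProgress(promises, (n) => log.updateText(`${n}/${total}`))
```
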
@@ -1,12 +1,12 @@
import { QuartzConfig } from "../cfg"
import { FullSlug } from "./path"
import { FilePath, FullSlug } from "./path"

export interface Argv {
  directory: string
  verbose: boolean
  output: string
  serve: boolean
  fastRebuild: boolean
  watch: boolean
  port: number
  wsPort: number
  remoteDevHost?: string
@@ -18,4 +18,8 @@ export interface BuildCtx {
  argv: Argv
  cfg: QuartzConfig
  allSlugs: FullSlug[]
  allFiles: FilePath[]
  incremental: boolean
}

export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg">

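`WorkerSerializableBuildCtx` drops `cfg` because the loaded config holds plugin functions, which cannot be posted across the worker boundary; the workers instead re-import `quartz.config` themselves (see `worker.ts` below). A quick demonstration of the underlying constraint, using a made-up config object:

```ts
// Functions do not survive structured cloning, which is how data crosses the
// worker boundary, so cloning an object that contains them throws.
const fakeCfg = { plugins: { transformers: [() => ({ name: "SomePlugin" })] } }

try {
  structuredClone(fakeCfg)
} catch (err) {
  console.log("cfg is not serializable:", (err as Error).name) // "DataCloneError"
}
```
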
@@ -1,18 +1,23 @@
import truncate from "ansi-truncate"
import readline from "readline"

export class QuartzLogger {
  verbose: boolean
  private spinnerInterval: NodeJS.Timeout | undefined
  private spinnerText: string = ""
  private updateSuffix: string = ""
  private spinnerIndex: number = 0
  private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]

  constructor(verbose: boolean) {
    this.verbose = verbose
    const isInteractiveTerminal =
      process.stdout.isTTY && process.env.TERM !== "dumb" && !process.env.CI
    this.verbose = verbose || !isInteractiveTerminal
  }

  start(text: string) {
    this.spinnerText = text

    if (this.verbose) {
      console.log(text)
    } else {
@@ -20,14 +25,22 @@ export class QuartzLogger {
      this.spinnerInterval = setInterval(() => {
        readline.clearLine(process.stdout, 0)
        readline.cursorTo(process.stdout, 0)
        process.stdout.write(`${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`)

        const columns = process.stdout.columns || 80
        let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`
        if (this.updateSuffix) {
          output += `: ${this.updateSuffix}`
        }

        const truncated = truncate(output, columns)
        process.stdout.write(truncated)
        this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length
      }, 20)
    }
  }

  updateText(text: string) {
    this.spinnerText = text
    this.updateSuffix = text
  }

  end(text?: string) {

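Together with the `emitContent` changes earlier, the logger now works as a sticky phase label (`start`) plus a per-file suffix (`updateText`). A rough usage sketch; the messages and file names are invented, and the import path assumes the repository root:

```ts
// Hypothetical usage; in CI or a non-TTY terminal the constructor forces
// verbose mode, so this degrades to plain console.log output.
import { QuartzLogger } from "./quartz/util/log"

const log = new QuartzLogger(false)
log.start("Emitting files")
for (const file of ["public/index.html", "public/tags/index.html"]) {
  // shown after the spinner text and truncated to the terminal width
  log.updateText(`ContentPage -> ${file}`)
}
log.end("Emitted 2 files")
```
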
@@ -260,7 +260,7 @@ export function endsWith(s: string, suffix: string): boolean {
  return s === suffix || s.endsWith("/" + suffix)
}

function trimSuffix(s: string, suffix: string): string {
export function trimSuffix(s: string, suffix: string): string {
  if (endsWith(s, suffix)) {
    s = s.slice(0, -suffix.length)
  }

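Since `trimSuffix` is now exported, its segment-aware behaviour via `endsWith` is worth a quick illustration; the last result assumes the function returns `s` after the lines shown in this hunk:

```ts
// Derived from the code above; the import path assumes the repository root.
import { endsWith, trimSuffix } from "./quartz/util/path"

console.log(endsWith("docs/index", "index")) // true  -> matches "docs/index".endsWith("/index")
console.log(endsWith("appendix", "index")) // false -> only whole path segments count
console.log(trimSuffix("docs/index", "index")) // "docs/" (assuming the trimmed string is returned)
```
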
@@ -1,8 +1,8 @@
import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options)
import cfg from "../quartz.config"
import { Argv, BuildCtx } from "./util/ctx"
import { FilePath, FullSlug } from "./util/path"
import { BuildCtx, WorkerSerializableBuildCtx } from "./util/ctx"
import { FilePath } from "./util/path"
import {
  createFileParser,
  createHtmlProcessor,
@@ -14,35 +14,24 @@ import { MarkdownContent, ProcessedContent } from "./plugins/vfile"

// only called from worker thread
export async function parseMarkdown(
  buildId: string,
  argv: Argv,
  partialCtx: WorkerSerializableBuildCtx,
  fps: FilePath[],
): Promise<[MarkdownContent[], FullSlug[]]> {
  // this is a hack
  // we assume markdown parsers can add to `allSlugs`,
  // but don't actually use them
  const allSlugs: FullSlug[] = []
): Promise<MarkdownContent[]> {
  const ctx: BuildCtx = {
    buildId,
    ...partialCtx,
    cfg,
    argv,
    allSlugs,
  }
  return [await createFileParser(ctx, fps)(createMdProcessor(ctx)), allSlugs]
  return await createFileParser(ctx, fps)(createMdProcessor(ctx))
}

// only called from worker thread
export function processHtml(
  buildId: string,
  argv: Argv,
  partialCtx: WorkerSerializableBuildCtx,
  mds: MarkdownContent[],
  allSlugs: FullSlug[],
): Promise<ProcessedContent[]> {
  const ctx: BuildCtx = {
    buildId,
    ...partialCtx,
    cfg,
    argv,
    allSlugs,
  }
  return createMarkdownParser(ctx, mds)(createHtmlProcessor(ctx))
}

@@ -11,6 +11,8 @@
    "skipLibCheck": true,
    "allowSyntheticDefaultImports": true,
    "forceConsistentCasingInFileNames": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "esModuleInterop": true,
    "jsx": "react-jsx",
    "jsxImportSource": "preact"