refactor static and asset emission to be actual emitter plugins

Jacky Zhao 2023-07-23 17:07:19 -07:00
parent 340e5e1472
commit a1985bfd28
18 changed files with 197 additions and 173 deletions
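
With this change, asset and static-file copying become ordinary emitter plugins, and every emitter's emit hook receives the shared BuildCtx (argv, cfg) instead of separate contentDir and cfg arguments, as shown in the plugin types diff below. A minimal sketch of an emitter written against the new signature, modeled on the Assets and Static plugins added in this commit (the ExampleEmitter name and its empty body are illustrative, not part of the diff):

import { FilePath } from "../../path"
import { QuartzEmitterPlugin } from "../types"

export const ExampleEmitter: QuartzEmitterPlugin = () => ({
  name: "ExampleEmitter",
  getQuartzComponents() {
    return []
  },
  // ctx bundles argv (directory, output, verbose, ...) and the Quartz config
  async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
    // a real plugin writes or copies files under argv.output here,
    // then returns the emitted paths so they show up in the build log
    return []
  },
})

The Assets and Static plugins below follow this shape and are registered in the config via Plugin.Assets() and Plugin.Static().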

View File

@@ -3,6 +3,7 @@ draft: true
---
## high priority
- attachments path
- https://help.obsidian.md/Editing+and+formatting/Tags#Nested+tags nested tags??
- watch mode for config/source code
@@ -13,6 +14,7 @@ draft: true
- note/header/block transcludes: https://help.obsidian.md/Linking+notes+and+files/Embedding+files
## misc
- breadcrumbs component
- filetree component
- recent notes component

View File

@@ -114,6 +114,8 @@ const config: QuartzConfig = {
enableSiteMap: true,
enableRSS: true,
}),
Plugin.Assets(),
Plugin.Static(),
],
},
}

View File

@@ -14,16 +14,14 @@ import { FilePath } from "./path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import WebSocket, { WebSocketServer } from "ws"
interface Argv {
directory: string
verbose: boolean
output: string
serve: boolean
port: number
}
import { Argv, BuildCtx } from "./ctx"
async function buildQuartz(argv: Argv, version: string) {
const ctx: BuildCtx = {
argv,
cfg,
}
console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
const perf = new PerfTimer()
const output = argv.output
@@ -38,12 +36,10 @@ async function buildQuartz(argv: Argv, version: string) {
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
// clean
perf.addEvent("clean")
await rimraf(output)
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
// glob
perf.addEvent("glob")
const fps = await globby("**/*.md", {
cwd: argv.directory,
@@ -55,52 +51,51 @@
)
const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath)
const parsedFiles = await parseMarkdown(
cfg.plugins.transformers,
argv.directory,
filePaths,
argv.verbose,
)
const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles)
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
if (argv.serve) {
await startServing(ctx, parsedFiles)
}
}
async function startServing(ctx: BuildCtx, initialContent: ProcessedContent[]) {
const { argv } = ctx
const wss = new WebSocketServer({ port: 3001 })
const connections: WebSocket[] = []
wss.on("connection", (ws) => connections.push(ws))
const ignored = await isGitIgnored()
const contentMap = new Map<FilePath, ProcessedContent>()
for (const content of parsedFiles) {
for (const content of initialContent) {
const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content)
}
async function rebuild(fp: string, action: "add" | "change" | "unlink") {
perf.addEvent("rebuild")
const perf = new PerfTimer()
if (!ignored(fp)) {
console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
try {
if (action === "add" || action === "change") {
const [parsedContent] = await parseMarkdown(
cfg.plugins.transformers,
argv.directory,
[fullPath],
argv.verbose,
)
const [parsedContent] = await parseMarkdown(ctx, [fullPath])
contentMap.set(fullPath, parsedContent)
} else if (action === "unlink") {
contentMap.delete(fullPath)
}
await rimraf(output)
await rimraf(argv.output)
const parsedFiles = [...contentMap.values()]
const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince("rebuild")}`))
const filteredContent = filterContent(ctx, parsedFiles)
await emitContent(
ctx,
filteredContent,
)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
} catch {
console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}
@@ -122,7 +117,7 @@ async function buildQuartz(argv: Argv, version: string) {
const server = http.createServer(async (req, res) => {
await serveHandler(req, res, {
public: output,
public: argv.output,
directoryListing: false,
})
const status = res.statusCode
@@ -137,7 +132,6 @@ async function buildQuartz(argv: Argv, version: string) {
server.listen(argv.port)
console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
console.log("hint: exit with ctrl+c")
}
}
export default async (argv: Argv, version: string) => {

View File

@@ -10,6 +10,5 @@ export interface Argv {
export interface BuildCtx {
argv: Argv
version: string
cfg: QuartzConfig
}

View File

@@ -13,12 +13,12 @@ export const AliasRedirects: QuartzEmitterPlugin = () => ({
getQuartzComponents() {
return []
},
async emit(contentFolder, _cfg, content, _resources, emit): Promise<FilePath[]> {
async emit({ argv }, content, _resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
for (const [_tree, file] of content) {
const ogSlug = canonicalizeServer(file.data.slug!)
const dir = path.relative(contentFolder, file.dirname ?? contentFolder)
const dir = path.relative(argv.directory, file.dirname ?? argv.directory)
let aliases: CanonicalSlug[] = []
if (file.data.frontmatter?.aliases) {

View File

@@ -0,0 +1,36 @@
import { globbyStream } from "globby"
import {
FilePath, slugifyFilePath,
} from "../../path"
import { QuartzEmitterPlugin } from "../types"
import path from "path"
import fs from "fs"
export const Assets: QuartzEmitterPlugin = () => ({
name: "Assets",
getQuartzComponents() {
return []
},
async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
// glob all non-MD/MDX/HTML files in the content folder and copy them over
const assetsPath = path.join(argv.output, "assets")
const fps: FilePath[] = []
for await (const rawFp of globbyStream("**", {
ignore: ["**/*.md"],
cwd: argv.directory,
})) {
const fp = rawFp as FilePath
const ext = path.extname(fp)
const src = path.join(argv.directory, fp) as FilePath
const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
const dest = path.join(assetsPath, name) as FilePath
const dir = path.dirname(dest) as FilePath
await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
await fs.promises.copyFile(src, dest)
fps.push(path.join("assets", fp) as FilePath)
}
return fps
},
})

View File

@@ -68,7 +68,8 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
opts = { ...defaultOptions, ...opts }
return {
name: "ContentIndex",
async emit(_contentDir, cfg, content, _resources, emit) {
async emit(ctx, content, _resources, emit) {
const cfg = ctx.cfg.configuration
const emitted: FilePath[] = []
const linkIndex: ContentIndex = new Map()
for (const [_tree, file] of content) {

View File

@@ -22,7 +22,8 @@ export const ContentPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
async emit(ctx, content, resources, emit): Promise<FilePath[]> {
const cfg = ctx.cfg.configuration
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
for (const [tree, file] of content) {

View File

@@ -22,9 +22,10 @@ export const FolderPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
async emit(ctx, content, resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration
const folders: Set<CanonicalSlug> = new Set(
allFiles.flatMap((data) => {

View File

@@ -3,3 +3,5 @@ export { TagPage } from "./tagPage"
export { FolderPage } from "./folderPage"
export { ContentIndex } from "./contentIndex"
export { AliasRedirects } from "./aliases"
export { Assets } from "./assets"
export { Static } from "./static"

View File

@@ -0,0 +1,21 @@
import { globby } from "globby"
import {
FilePath, QUARTZ
} from "../../path"
import { QuartzEmitterPlugin } from "../types"
import path from "path"
import fs from "fs"
export const Static: QuartzEmitterPlugin = () => ({
name: "Static",
getQuartzComponents() {
return []
},
async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
const staticPath = path.join(QUARTZ, "static")
const fps = await globby("*", { cwd: staticPath })
await fs.promises.cp(staticPath, path.join(argv.output, "static"), { recursive: true })
return fps.map(fp => path.join("static", fp)) as FilePath[]
},
})

View File

@@ -21,9 +21,10 @@ export const TagPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
async emit(ctx, content, resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration
const tags: Set<string> = new Set(allFiles.flatMap((data) => data.frontmatter?.tags ?? []))
const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(

View File

@@ -225,7 +225,7 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
findAndReplace(tree, commentRegex, (_value: string, ..._capture: string[]) => {
return {
type: "text",
value: ""
value: "",
}
})
}
@@ -296,7 +296,8 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
node.data = {
hProperties: {
...(node.data?.hProperties ?? {}),
className: `callout ${collapse ? "is-collapsible" : ""} ${defaultState === "collapsed" ? "is-collapsed" : ""
className: `callout ${collapse ? "is-collapsible" : ""} ${
defaultState === "collapsed" ? "is-collapsed" : ""
}`,
"data-callout": calloutType,
"data-callout-fold": collapse,

View File

@@ -4,6 +4,7 @@ import { ProcessedContent } from "./vfile"
import { GlobalConfiguration } from "../cfg"
import { QuartzComponent } from "../components/types"
import { FilePath, ServerSlug } from "../path"
import { BuildCtx } from "../ctx"
export interface PluginTypes {
transformers: QuartzTransformerPluginInstance[]
@@ -37,8 +38,7 @@ export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
export type QuartzEmitterPluginInstance = {
name: string
emit(
contentDir: string,
cfg: GlobalConfiguration,
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
emitCallback: EmitCallback,

View File

@@ -1,6 +1,5 @@
import path from "path"
import fs from "fs"
import { GlobalConfiguration, QuartzConfig } from "../cfg"
import { PerfTimer } from "../perf"
import {
ComponentResources,
@@ -10,8 +9,7 @@
} from "../plugins"
import { EmitCallback } from "../plugins/types"
import { ProcessedContent } from "../plugins/vfile"
import { FilePath, QUARTZ, slugifyFilePath } from "../path"
import { globbyStream } from "globby"
import { FilePath } from "../path"
// @ts-ignore
import spaRouterScript from "../components/scripts/spa.inline"
@@ -24,13 +22,15 @@ import { StaticResources } from "../resources"
import { QuartzLogger } from "../log"
import { googleFontHref } from "../theme"
import { trace } from "../trace"
import { BuildCtx } from "../ctx"
function addGlobalPageResources(
cfg: GlobalConfiguration,
reloadScript: boolean,
ctx: BuildCtx,
staticResources: StaticResources,
componentResources: ComponentResources,
) {
const cfg = ctx.cfg.configuration
const reloadScript = ctx.argv.serve
staticResources.css.push(googleFontHref(cfg.theme))
// popovers
@@ -85,19 +85,17 @@
}
export async function emitContent(
contentFolder: string,
output: string,
cfg: QuartzConfig,
ctx: BuildCtx,
content: ProcessedContent[],
reloadScript: boolean,
verbose: boolean,
) {
const { argv, cfg } = ctx
const contentFolder = argv.directory
const perf = new PerfTimer()
const log = new QuartzLogger(verbose)
const log = new QuartzLogger(ctx.argv.verbose)
log.start(`Emitting output files`)
const emit: EmitCallback = async ({ slug, ext, content }) => {
const pathToPage = path.join(output, slug + ext) as FilePath
const pathToPage = path.join(argv.output, slug + ext) as FilePath
const dir = path.dirname(pathToPage)
await fs.promises.mkdir(dir, { recursive: true })
await fs.promises.writeFile(pathToPage, content)
@@ -113,11 +111,11 @@ export async function emitContent(
// important that this goes *after* component scripts
// as the "nav" event gets triggered here and we should make sure
// that everyone else had the chance to register a listener for it
addGlobalPageResources(cfg.configuration, reloadScript, staticResources, componentResources)
addGlobalPageResources(ctx, staticResources, componentResources)
let emittedFiles = 0
const emittedResources = await emitComponentResources(cfg.configuration, componentResources, emit)
if (verbose) {
if (argv.verbose) {
for (const file of emittedResources) {
emittedFiles += 1
console.log(`[emit:Resources] ${file}`)
@@ -128,15 +126,14 @@ export async function emitContent(
for (const emitter of cfg.plugins.emitters) {
try {
const emitted = await emitter.emit(
contentFolder,
cfg.configuration,
ctx,
content,
staticResources,
emit,
)
emittedFiles += emitted.length
if (verbose) {
if (ctx.argv.verbose) {
for (const file of emitted) {
console.log(`[emit:${emitter.name}] ${file}`)
}
@@ -147,31 +144,5 @@
}
}
const staticPath = path.join(QUARTZ, "static")
await fs.promises.cp(staticPath, path.join(output, "static"), { recursive: true })
if (verbose) {
console.log(`[emit:Static] ${path.join("static", "**")}`)
}
// glob all non MD/MDX/HTML files in content folder and copy it over
const assetsPath = path.join(output, "assets")
for await (const rawFp of globbyStream("**", {
ignore: ["**/*.md"],
cwd: contentFolder,
})) {
const fp = rawFp as FilePath
const ext = path.extname(fp)
const src = path.join(contentFolder, fp) as FilePath
const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
const dest = path.join(assetsPath, name) as FilePath
const dir = path.dirname(dest) as FilePath
await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
await fs.promises.copyFile(src, dest)
emittedFiles += 1
if (verbose) {
console.log(`[emit:Assets] ${path.join("assets", name)}`)
}
}
log.end(`Emitted ${emittedFiles} files to \`${output}\` in ${perf.timeSince()}`)
log.end(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince()}`)
}

View File

@@ -1,18 +1,18 @@
import { BuildCtx } from "../ctx"
import { PerfTimer } from "../perf"
import { QuartzFilterPluginInstance } from "../plugins/types"
import { ProcessedContent } from "../plugins/vfile"
export function filterContent(
plugins: QuartzFilterPluginInstance[],
{ cfg, argv }: BuildCtx,
content: ProcessedContent[],
verbose: boolean,
): ProcessedContent[] {
const perf = new PerfTimer()
const initialLength = content.length
for (const plugin of plugins) {
for (const plugin of cfg.plugins.filters) {
const updatedContent = content.filter(plugin.shouldPublish)
if (verbose) {
if (argv.verbose) {
const diff = content.filter((x) => !updatedContent.includes(x))
for (const file of diff) {
console.log(`[filter:${plugin.name}] ${file[1].data.slug}`)

View File

@@ -14,6 +14,7 @@ import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzTransformerPluginInstance } from "../plugins/types"
import { QuartzLogger } from "../log"
import { trace } from "../trace"
import { BuildCtx } from "../ctx"
export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
@@ -72,13 +73,7 @@ async function transpileWorkerScript() {
})
}
export function createFileParser(
transformers: QuartzTransformerPluginInstance[],
baseDir: string,
fps: FilePath[],
allSlugs: ServerSlug[],
verbose: boolean,
) {
export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) {
return async (processor: QuartzProcessor) => {
const res: ProcessedContent[] = []
for (const fp of fps) {
@@ -89,12 +84,12 @@ export function createFileParser(
file.value = file.value.toString().trim()
// Text -> Text transforms
for (const plugin of transformers.filter((p) => p.textTransform)) {
for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) {
file.value = plugin.textTransform!(file.value)
}
// base data properties that plugins may use
file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath)
file.data.allSlugs = allSlugs
file.data.filePath = fp
@@ -102,7 +97,7 @@ export function createFileParser(
const newAst = await processor.run(ast, file)
res.push([newAst, file])
if (verbose) {
if (argv.verbose) {
console.log(`[process] ${fp} -> ${file.data.slug}`)
}
} catch (err) {
@@ -115,29 +110,25 @@
}
}
export async function parseMarkdown(
transformers: QuartzTransformerPluginInstance[],
baseDir: string,
fps: FilePath[],
verbose: boolean,
): Promise<ProcessedContent[]> {
export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
const { argv, cfg } = ctx
const perf = new PerfTimer()
const log = new QuartzLogger(verbose)
const log = new QuartzLogger(argv.verbose)
const CHUNK_SIZE = 128
let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
// get all slugs ahead of time as each thread needs a copy
const allSlugs = fps.map((fp) =>
slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath),
slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath),
)
let res: ProcessedContent[] = []
log.start(`Parsing input files using ${concurrency} threads`)
if (concurrency === 1) {
try {
const processor = createProcessor(transformers)
const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
const processor = createProcessor(cfg.plugins.transformers)
const parse = createFileParser(ctx, fps, allSlugs)
res = await parse(processor)
} catch (error) {
log.end()
@@ -153,7 +144,7 @@ export async function parseMarkdown(
const childPromises: WorkerPromise<ProcessedContent[]>[] = []
for (const chunk of chunks(fps, CHUNK_SIZE)) {
childPromises.push(pool.exec("parseFiles", [baseDir, chunk, allSlugs, verbose]))
childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs]))
}
const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)

View File

@@ -1,17 +1,18 @@
import config from "../quartz.config"
import cfg from "../quartz.config"
import { Argv, BuildCtx } from "./ctx"
import { FilePath, ServerSlug } from "./path"
import { createFileParser, createProcessor } from "./processors/parse"
const transformers = config.plugins.transformers
const transformers = cfg.plugins.transformers
const processor = createProcessor(transformers)
// only called from worker thread
export async function parseFiles(
baseDir: string,
fps: FilePath[],
allSlugs: ServerSlug[],
verbose: boolean,
) {
const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
export async function parseFiles(argv: Argv, fps: FilePath[], allSlugs: ServerSlug[]) {
const ctx: BuildCtx = {
cfg,
argv,
}
const parse = createFileParser(ctx, fps, allSlugs)
return parse(processor)
}